// Copyright 2015-2022 The Khronos Group Inc.
//
// SPDX-License-Identifier: Apache-2.0 OR MIT
//

// This header is generated from the Khronos Vulkan XML API Registry.

#ifndef VULKAN_STRUCTS_HPP
# define VULKAN_STRUCTS_HPP

namespace VULKAN_HPP_NAMESPACE
{

//===============
//=== STRUCTS ===
//===============

struct AabbPositionsKHR
{
using NativeType = VkAabbPositionsKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AabbPositionsKHR(float minX_ = {}, float minY_ = {}, float minZ_ = {}, float maxX_ = {}, float maxY_ = {}, float maxZ_ = {}) VULKAN_HPP_NOEXCEPT
: minX( minX_ ), minY( minY_ ), minZ( minZ_ ), maxX( maxX_ ), maxY( maxY_ ), maxZ( maxZ_ )
{}

VULKAN_HPP_CONSTEXPR AabbPositionsKHR( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

AabbPositionsKHR( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AabbPositionsKHR( *reinterpret_cast<AabbPositionsKHR const *>( &rhs ) )
{}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

AabbPositionsKHR & operator=( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

AabbPositionsKHR & operator=( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AabbPositionsKHR const *>( &rhs );
return *this;
}

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinX( float minX_ ) VULKAN_HPP_NOEXCEPT
{
minX = minX_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinY( float minY_ ) VULKAN_HPP_NOEXCEPT
{
minY = minY_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinZ( float minZ_ ) VULKAN_HPP_NOEXCEPT
{
minZ = minZ_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxX( float maxX_ ) VULKAN_HPP_NOEXCEPT
{
maxX = maxX_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxY( float maxY_ ) VULKAN_HPP_NOEXCEPT
{
maxY = maxY_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxZ( float maxZ_ ) VULKAN_HPP_NOEXCEPT
{
maxZ = maxZ_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

explicit operator VkAabbPositionsKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAabbPositionsKHR*>( this );
}

explicit operator VkAabbPositionsKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAabbPositionsKHR*>( this );
}

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<float const &, float const &, float const &, float const &, float const &, float const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( minX, minY, minZ, maxX, maxY, maxZ );
}
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AabbPositionsKHR const & ) const = default;
#else
bool operator==( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( minX == rhs.minX )
&& ( minY == rhs.minY )
&& ( minZ == rhs.minZ )
&& ( maxX == rhs.maxX )
&& ( maxY == rhs.maxY )
&& ( maxZ == rhs.maxZ );
#endif
}

bool operator!=( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif

public:
float minX = {};
float minY = {};
float minZ = {};
float maxX = {};
float maxY = {};
float maxZ = {};
};

VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AabbPositionsKHR ) == sizeof( VkAabbPositionsKHR ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AabbPositionsKHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AabbPositionsKHR>::value, "AabbPositionsKHR is not nothrow_move_constructible!" );
using AabbPositionsNV = AabbPositionsKHR;

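// --- Illustrative usage sketch (not part of the generated header) ----------
// A minimal example of filling an axis-aligned bounding box, assuming the
// fluent setters are available (VULKAN_HPP_NO_STRUCT_SETTERS not defined):
//
//   VULKAN_HPP_NAMESPACE::AabbPositionsKHR aabb;
//   aabb.setMinX( -1.0f ).setMinY( -1.0f ).setMinZ( -1.0f )
//       .setMaxX( 1.0f ).setMaxY( 1.0f ).setMaxZ( 1.0f );
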
union DeviceOrHostAddressConstKHR
{
using NativeType = VkDeviceOrHostAddressConstKHR;
#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )

VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} )
: deviceAddress( deviceAddress_ )
{}

VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR( const void * hostAddress_ )
: hostAddress( hostAddress_ )
{}
#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/

#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
deviceAddress = deviceAddress_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR & setHostAddress( const void * hostAddress_ ) VULKAN_HPP_NOEXCEPT
{
hostAddress = hostAddress_;
return *this;
}
#endif /*VULKAN_HPP_NO_UNION_SETTERS*/

operator VkDeviceOrHostAddressConstKHR const &() const
{
return *reinterpret_cast<const VkDeviceOrHostAddressConstKHR*>( this );
}

operator VkDeviceOrHostAddressConstKHR &()
{
return *reinterpret_cast<VkDeviceOrHostAddressConstKHR*>( this );
}

#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress;
const void * hostAddress;
#else
VkDeviceAddress deviceAddress;
const void * hostAddress;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/

};

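// --- Illustrative usage sketch (not part of the generated header) ----------
// DeviceOrHostAddressConstKHR carries either a GPU device address or a host
// pointer; only one member is meaningful at a time. A hypothetical
// bufferAddress / hostData obtained elsewhere in the application could be
// wrapped like this:
//
//   VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR addr( bufferAddress );
//   // or, for host-side builds:
//   // addr.setHostAddress( hostData.data() );
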
struct AccelerationStructureGeometryTrianglesDataKHR
{
using NativeType = VkAccelerationStructureGeometryTrianglesDataKHR;

static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR(VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, uint32_t maxVertex_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData_ = {}) VULKAN_HPP_NOEXCEPT
: vertexFormat( vertexFormat_ ), vertexData( vertexData_ ), vertexStride( vertexStride_ ), maxVertex( maxVertex_ ), indexType( indexType_ ), indexData( indexData_ ), transformData( transformData_ )
{}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

AccelerationStructureGeometryTrianglesDataKHR( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureGeometryTrianglesDataKHR( *reinterpret_cast<AccelerationStructureGeometryTrianglesDataKHR const *>( &rhs ) )
{}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

AccelerationStructureGeometryTrianglesDataKHR & operator=( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

AccelerationStructureGeometryTrianglesDataKHR & operator=( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const *>( &rhs );
return *this;
}

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
{
vertexFormat = vertexFormat_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT
{
vertexData = vertexData_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT
{
vertexStride = vertexStride_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setMaxVertex( uint32_t maxVertex_ ) VULKAN_HPP_NOEXCEPT
{
maxVertex = maxVertex_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
{
indexType = indexType_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setIndexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT
{
indexData = indexData_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setTransformData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & transformData_ ) VULKAN_HPP_NOEXCEPT
{
transformData = transformData_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

explicit operator VkAccelerationStructureGeometryTrianglesDataKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureGeometryTrianglesDataKHR*>( this );
}

explicit operator VkAccelerationStructureGeometryTrianglesDataKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureGeometryTrianglesDataKHR*>( this );
}

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, VULKAN_HPP_NAMESPACE::IndexType const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, vertexFormat, vertexData, vertexStride, maxVertex, indexType, indexData, transformData );
}
#endif

public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {};
VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {};
uint32_t maxVertex = {};
VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData = {};
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData = {};
};

VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR ) == sizeof( VkAccelerationStructureGeometryTrianglesDataKHR ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR>::value, "AccelerationStructureGeometryTrianglesDataKHR is not nothrow_move_constructible!" );

template <>
struct CppType<StructureType, StructureType::eAccelerationStructureGeometryTrianglesDataKHR>
{
using Type = AccelerationStructureGeometryTrianglesDataKHR;
};

struct AccelerationStructureGeometryAabbsDataKHR
{
using NativeType = VkAccelerationStructureGeometryAabbsDataKHR;

static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryAabbsDataKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}) VULKAN_HPP_NOEXCEPT
: data( data_ ), stride( stride_ )
{}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

AccelerationStructureGeometryAabbsDataKHR( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureGeometryAabbsDataKHR( *reinterpret_cast<AccelerationStructureGeometryAabbsDataKHR const *>( &rhs ) )
{}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

AccelerationStructureGeometryAabbsDataKHR & operator=( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

AccelerationStructureGeometryAabbsDataKHR & operator=( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const *>( &rhs );
return *this;
}

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT
{
data = data_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT
{
stride = stride_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

explicit operator VkAccelerationStructureGeometryAabbsDataKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureGeometryAabbsDataKHR*>( this );
}

explicit operator VkAccelerationStructureGeometryAabbsDataKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureGeometryAabbsDataKHR*>( this );
}

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, data, stride );
}
#endif

public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryAabbsDataKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {};
VULKAN_HPP_NAMESPACE::DeviceSize stride = {};
};

VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR ) == sizeof( VkAccelerationStructureGeometryAabbsDataKHR ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR>::value, "AccelerationStructureGeometryAabbsDataKHR is not nothrow_move_constructible!" );

template <>
struct CppType<StructureType, StructureType::eAccelerationStructureGeometryAabbsDataKHR>
{
using Type = AccelerationStructureGeometryAabbsDataKHR;
};

struct AccelerationStructureGeometryInstancesDataKHR
{
using NativeType = VkAccelerationStructureGeometryInstancesDataKHR;

static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryInstancesDataKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR(VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}) VULKAN_HPP_NOEXCEPT
: arrayOfPointers( arrayOfPointers_ ), data( data_ )
{}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

AccelerationStructureGeometryInstancesDataKHR( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureGeometryInstancesDataKHR( *reinterpret_cast<AccelerationStructureGeometryInstancesDataKHR const *>( &rhs ) )
{}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

AccelerationStructureGeometryInstancesDataKHR & operator=( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

AccelerationStructureGeometryInstancesDataKHR & operator=( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const *>( &rhs );
return *this;
}

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & setArrayOfPointers( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ ) VULKAN_HPP_NOEXCEPT
{
arrayOfPointers = arrayOfPointers_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT
{
data = data_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

explicit operator VkAccelerationStructureGeometryInstancesDataKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureGeometryInstancesDataKHR*>( this );
}

explicit operator VkAccelerationStructureGeometryInstancesDataKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureGeometryInstancesDataKHR*>( this );
}

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, arrayOfPointers, data );
}
#endif

public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryInstancesDataKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers = {};
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {};
};

VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR ) == sizeof( VkAccelerationStructureGeometryInstancesDataKHR ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR>::value, "AccelerationStructureGeometryInstancesDataKHR is not nothrow_move_constructible!" );

template <>
struct CppType<StructureType, StructureType::eAccelerationStructureGeometryInstancesDataKHR>
{
using Type = AccelerationStructureGeometryInstancesDataKHR;
};

union AccelerationStructureGeometryDataKHR
{
using NativeType = VkAccelerationStructureGeometryDataKHR;
#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles_ = {} )
: triangles( triangles_ )
{}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs_ )
: aabbs( aabbs_ )
{}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances_ )
: instances( instances_ )
{}
#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/

#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & setTriangles( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const & triangles_ ) VULKAN_HPP_NOEXCEPT
{
triangles = triangles_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & setAabbs( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const & aabbs_ ) VULKAN_HPP_NOEXCEPT
{
aabbs = aabbs_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & setInstances( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const & instances_ ) VULKAN_HPP_NOEXCEPT
{
instances = instances_;
return *this;
}
#endif /*VULKAN_HPP_NO_UNION_SETTERS*/

operator VkAccelerationStructureGeometryDataKHR const &() const
{
return *reinterpret_cast<const VkAccelerationStructureGeometryDataKHR*>( this );
}

operator VkAccelerationStructureGeometryDataKHR &()
{
return *reinterpret_cast<VkAccelerationStructureGeometryDataKHR*>( this );
}

#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles;
VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs;
VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances;
#else
VkAccelerationStructureGeometryTrianglesDataKHR triangles;
VkAccelerationStructureGeometryAabbsDataKHR aabbs;
VkAccelerationStructureGeometryInstancesDataKHR instances;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/

};

struct AccelerationStructureGeometryKHR
{
using NativeType = VkAccelerationStructureGeometryKHR;

static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR(VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry_ = {}, VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {}) VULKAN_HPP_NOEXCEPT
: geometryType( geometryType_ ), geometry( geometry_ ), flags( flags_ )
{}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

AccelerationStructureGeometryKHR( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureGeometryKHR( *reinterpret_cast<AccelerationStructureGeometryKHR const *>( &rhs ) )
{}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

AccelerationStructureGeometryKHR & operator=( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

AccelerationStructureGeometryKHR & operator=( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR const *>( &rhs );
return *this;
}

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT
{
geometryType = geometryType_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setGeometry( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & geometry_ ) VULKAN_HPP_NOEXCEPT
{
geometry = geometry_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

explicit operator VkAccelerationStructureGeometryKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureGeometryKHR*>( this );
}

explicit operator VkAccelerationStructureGeometryKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureGeometryKHR*>( this );
}

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::GeometryTypeKHR const &, VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const &, VULKAN_HPP_NAMESPACE::GeometryFlagsKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, geometryType, geometry, flags );
}
#endif

public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles;
VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry = {};
VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {};
};

VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR ) == sizeof( VkAccelerationStructureGeometryKHR ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR>::value, "AccelerationStructureGeometryKHR is not nothrow_move_constructible!" );

template <>
struct CppType<StructureType, StructureType::eAccelerationStructureGeometryKHR>
{
using Type = AccelerationStructureGeometryKHR;
};

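// --- Illustrative usage sketch (not part of the generated header) ----------
// Describing an indexed triangle mesh for an acceleration structure build;
// vertexAddress, indexAddress and vertexCount are assumed to come from the
// application (e.g. buffer device addresses queried elsewhere), and the
// fluent setters are assumed to be enabled:
//
//   VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles;
//   triangles.setVertexFormat( VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat )
//            .setVertexData( vertexAddress )
//            .setVertexStride( 3 * sizeof( float ) )
//            .setMaxVertex( vertexCount - 1 )
//            .setIndexType( VULKAN_HPP_NAMESPACE::IndexType::eUint32 )
//            .setIndexData( indexAddress );
//
//   VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR geometry;
//   geometry.setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles )
//           .setGeometry( triangles )
//           .setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagBitsKHR::eOpaque );
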
union DeviceOrHostAddressKHR
{
using NativeType = VkDeviceOrHostAddressKHR;
#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )

VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} )
: deviceAddress( deviceAddress_ )
{}

VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR( void * hostAddress_ )
: hostAddress( hostAddress_ )
{}
#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/

#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
deviceAddress = deviceAddress_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR & setHostAddress( void * hostAddress_ ) VULKAN_HPP_NOEXCEPT
{
hostAddress = hostAddress_;
return *this;
}
#endif /*VULKAN_HPP_NO_UNION_SETTERS*/

operator VkDeviceOrHostAddressKHR const &() const
{
return *reinterpret_cast<const VkDeviceOrHostAddressKHR*>( this );
}

operator VkDeviceOrHostAddressKHR &()
{
return *reinterpret_cast<VkDeviceOrHostAddressKHR*>( this );
}

#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress;
void * hostAddress;
#else
VkDeviceAddress deviceAddress;
void * hostAddress;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/

};

struct AccelerationStructureBuildGeometryInfoKHR
{
using NativeType = VkAccelerationStructureBuildGeometryInfoKHR;

static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureBuildGeometryInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR::eBuild, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ = {}, uint32_t geometryCount_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {}) VULKAN_HPP_NOEXCEPT
: type( type_ ), flags( flags_ ), mode( mode_ ), srcAccelerationStructure( srcAccelerationStructure_ ), dstAccelerationStructure( dstAccelerationStructure_ ), geometryCount( geometryCount_ ), pGeometries( pGeometries_ ), ppGeometries( ppGeometries_ ), scratchData( scratchData_ )
{}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

AccelerationStructureBuildGeometryInfoKHR( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureBuildGeometryInfoKHR( *reinterpret_cast<AccelerationStructureBuildGeometryInfoKHR const *>( &rhs ) )
{}

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
AccelerationStructureBuildGeometryInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR> const & geometries_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const> const & pGeometries_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {} )
: type( type_ ), flags( flags_ ), mode( mode_ ), srcAccelerationStructure( srcAccelerationStructure_ ), dstAccelerationStructure( dstAccelerationStructure_ ), geometryCount( static_cast<uint32_t>( !geometries_.empty() ? geometries_.size() : pGeometries_.size() ) ), pGeometries( geometries_.data() ), ppGeometries( pGeometries_.data() ), scratchData( scratchData_ )
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( ( !geometries_.empty() + !pGeometries_.empty() ) <= 1);
#else
if ( 1 < ( !geometries_.empty() + !pGeometries_.empty() ) )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::AccelerationStructureBuildGeometryInfoKHR::AccelerationStructureBuildGeometryInfoKHR: 1 < ( !geometries_.empty() + !pGeometries_.empty() )" );
}
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

AccelerationStructureBuildGeometryInfoKHR & operator=( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

AccelerationStructureBuildGeometryInfoKHR & operator=( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR const *>( &rhs );
return *this;
}

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setMode( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
{
mode = mode_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setSrcAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
srcAccelerationStructure = srcAccelerationStructure_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setDstAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
dstAccelerationStructure = dstAccelerationStructure_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT
{
geometryCount = geometryCount_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setPGeometries( const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries_ ) VULKAN_HPP_NOEXCEPT
{
pGeometries = pGeometries_;
return *this;
}

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
AccelerationStructureBuildGeometryInfoKHR & setGeometries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR> const & geometries_ ) VULKAN_HPP_NOEXCEPT
{
geometryCount = static_cast<uint32_t>( geometries_.size() );
pGeometries = geometries_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setPpGeometries( const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_ ) VULKAN_HPP_NOEXCEPT
{
ppGeometries = ppGeometries_;
return *this;
}

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
AccelerationStructureBuildGeometryInfoKHR & setPGeometries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const> const & pGeometries_ ) VULKAN_HPP_NOEXCEPT
{
geometryCount = static_cast<uint32_t>( pGeometries_.size() );
ppGeometries = pGeometries_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & scratchData_ ) VULKAN_HPP_NOEXCEPT
{
scratchData = scratchData_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

explicit operator VkAccelerationStructureBuildGeometryInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR*>( this );
}

explicit operator VkAccelerationStructureBuildGeometryInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureBuildGeometryInfoKHR*>( this );
}

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR const &, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR const &, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR const &, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const &, const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, type, flags, mode, srcAccelerationStructure, dstAccelerationStructure, geometryCount, pGeometries, ppGeometries, scratchData );
}
#endif

public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildGeometryInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel;
VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {};
VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR::eBuild;
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure = {};
uint32_t geometryCount = {};
const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries = {};
const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries = {};
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData = {};
};

VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR ) == sizeof( VkAccelerationStructureBuildGeometryInfoKHR ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR>::value, "AccelerationStructureBuildGeometryInfoKHR is not nothrow_move_constructible!" );

template <>
struct CppType<StructureType, StructureType::eAccelerationStructureBuildGeometryInfoKHR>
{
using Type = AccelerationStructureBuildGeometryInfoKHR;
};

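// --- Illustrative usage sketch (not part of the generated header) ----------
// A minimal bottom-level build description, assuming enhanced mode is enabled
// (for the ArrayProxy-based setGeometries overload) and a `geometry` variable
// filled in as in the sketch further above:
//
//   VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR buildInfo;
//   buildInfo.setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eBottomLevel )
//            .setMode( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR::eBuild )
//            .setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace )
//            .setGeometries( geometry );
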
struct AccelerationStructureBuildRangeInfoKHR
{
using NativeType = VkAccelerationStructureBuildRangeInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR(uint32_t primitiveCount_ = {}, uint32_t primitiveOffset_ = {}, uint32_t firstVertex_ = {}, uint32_t transformOffset_ = {}) VULKAN_HPP_NOEXCEPT
: primitiveCount( primitiveCount_ ), primitiveOffset( primitiveOffset_ ), firstVertex( firstVertex_ ), transformOffset( transformOffset_ )
{}

VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR( AccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

AccelerationStructureBuildRangeInfoKHR( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureBuildRangeInfoKHR( *reinterpret_cast<AccelerationStructureBuildRangeInfoKHR const *>( &rhs ) )
{}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

AccelerationStructureBuildRangeInfoKHR & operator=( AccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

AccelerationStructureBuildRangeInfoKHR & operator=( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR const *>( &rhs );
return *this;
}

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setPrimitiveCount( uint32_t primitiveCount_ ) VULKAN_HPP_NOEXCEPT
{
primitiveCount = primitiveCount_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setPrimitiveOffset( uint32_t primitiveOffset_ ) VULKAN_HPP_NOEXCEPT
{
primitiveOffset = primitiveOffset_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
{
firstVertex = firstVertex_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setTransformOffset( uint32_t transformOffset_ ) VULKAN_HPP_NOEXCEPT
{
transformOffset = transformOffset_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

explicit operator VkAccelerationStructureBuildRangeInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR*>( this );
}

explicit operator VkAccelerationStructureBuildRangeInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureBuildRangeInfoKHR*>( this );
}

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( primitiveCount, primitiveOffset, firstVertex, transformOffset );
}
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AccelerationStructureBuildRangeInfoKHR const & ) const = default;
#else
bool operator==( AccelerationStructureBuildRangeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( primitiveCount == rhs.primitiveCount )
&& ( primitiveOffset == rhs.primitiveOffset )
&& ( firstVertex == rhs.firstVertex )
&& ( transformOffset == rhs.transformOffset );
#endif
}

bool operator!=( AccelerationStructureBuildRangeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif

public:
uint32_t primitiveCount = {};
uint32_t primitiveOffset = {};
uint32_t firstVertex = {};
uint32_t transformOffset = {};
};

VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR ) == sizeof( VkAccelerationStructureBuildRangeInfoKHR ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR>::value, "AccelerationStructureBuildRangeInfoKHR is not nothrow_move_constructible!" );

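// --- Illustrative usage sketch (not part of the generated header) ----------
// One AccelerationStructureBuildRangeInfoKHR is consumed per geometry; for an
// indexed triangle mesh the primitive count is indexCount / 3 (indexCount is
// an assumed application-side value here), and the offsets index into the
// bound buffers:
//
//   VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR rangeInfo( indexCount / 3, 0, 0, 0 );
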
struct AccelerationStructureBuildSizesInfoKHR
{
using NativeType = VkAccelerationStructureBuildSizesInfoKHR;

static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureBuildSizesInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR(VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ = {}) VULKAN_HPP_NOEXCEPT
: accelerationStructureSize( accelerationStructureSize_ ), updateScratchSize( updateScratchSize_ ), buildScratchSize( buildScratchSize_ )
{}

VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR( AccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

AccelerationStructureBuildSizesInfoKHR( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureBuildSizesInfoKHR( *reinterpret_cast<AccelerationStructureBuildSizesInfoKHR const *>( &rhs ) )
{}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

AccelerationStructureBuildSizesInfoKHR & operator=( AccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

AccelerationStructureBuildSizesInfoKHR & operator=( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR const *>( &rhs );
return *this;
}

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR & setAccelerationStructureSize( VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructureSize = accelerationStructureSize_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR & setUpdateScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_ ) VULKAN_HPP_NOEXCEPT
{
updateScratchSize = updateScratchSize_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR & setBuildScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ ) VULKAN_HPP_NOEXCEPT
{
buildScratchSize = buildScratchSize_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

explicit operator VkAccelerationStructureBuildSizesInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureBuildSizesInfoKHR*>( this );
}

explicit operator VkAccelerationStructureBuildSizesInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR*>( this );
}

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, accelerationStructureSize, updateScratchSize, buildScratchSize );
}
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AccelerationStructureBuildSizesInfoKHR const & ) const = default;
#else
bool operator==( AccelerationStructureBuildSizesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( accelerationStructureSize == rhs.accelerationStructureSize )
&& ( updateScratchSize == rhs.updateScratchSize )
&& ( buildScratchSize == rhs.buildScratchSize );
#endif
}

bool operator!=( AccelerationStructureBuildSizesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif

public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildSizesInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize = {};
VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize = {};
VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize = {};
};

VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR ) == sizeof( VkAccelerationStructureBuildSizesInfoKHR ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR>::value, "AccelerationStructureBuildSizesInfoKHR is not nothrow_move_constructible!" );

template <>
struct CppType<StructureType, StructureType::eAccelerationStructureBuildSizesInfoKHR>
{
using Type = AccelerationStructureBuildSizesInfoKHR;
};

struct AccelerationStructureCreateInfoKHR
{
using NativeType = VkAccelerationStructureCreateInfoKHR;

static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}) VULKAN_HPP_NOEXCEPT
: createFlags( createFlags_ ), buffer( buffer_ ), offset( offset_ ), size( size_ ), type( type_ ), deviceAddress( deviceAddress_ )
{}

VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

AccelerationStructureCreateInfoKHR( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: AccelerationStructureCreateInfoKHR( *reinterpret_cast<AccelerationStructureCreateInfoKHR const *>( &rhs ) )
{}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

AccelerationStructureCreateInfoKHR & operator=( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

AccelerationStructureCreateInfoKHR & operator=( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const *>( &rhs );
return *this;
}

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setCreateFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ ) VULKAN_HPP_NOEXCEPT
{
createFlags = createFlags_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
deviceAddress = deviceAddress_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

explicit operator VkAccelerationStructureCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkAccelerationStructureCreateInfoKHR*>( this );
}

explicit operator VkAccelerationStructureCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkAccelerationStructureCreateInfoKHR*>( this );
}

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, createFlags, buffer, offset, size, type, deviceAddress );
}
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( AccelerationStructureCreateInfoKHR const & ) const = default;
#else
bool operator==( AccelerationStructureCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( createFlags == rhs.createFlags )
&& ( buffer == rhs.buffer )
&& ( offset == rhs.offset )
&& ( size == rhs.size )
&& ( type == rhs.type )
&& ( deviceAddress == rhs.deviceAddress );
#endif
}

bool operator!=( AccelerationStructureCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif

public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags = {};
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel;
VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
};

VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR ) == sizeof( VkAccelerationStructureCreateInfoKHR ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR>::value, "AccelerationStructureCreateInfoKHR is not nothrow_move_constructible!" );

template <>
struct CppType<StructureType, StructureType::eAccelerationStructureCreateInfoKHR>
{
using Type = AccelerationStructureCreateInfoKHR;
};

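// --- Illustrative usage sketch (not part of the generated header) ----------
// A typical create info references a backing buffer sized from a previously
// queried AccelerationStructureBuildSizesInfoKHR; `buffer` and `sizeInfo` are
// assumed to exist in the surrounding application code:
//
//   VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR createInfo;
//   createInfo.setBuffer( buffer )
//             .setSize( sizeInfo.accelerationStructureSize )
//             .setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eBottomLevel );
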
struct GeometryTrianglesNV
{
using NativeType = VkGeometryTrianglesNV;

static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryTrianglesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR GeometryTrianglesNV(VULKAN_HPP_NAMESPACE::Buffer vertexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ = {}, uint32_t vertexCount_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Buffer indexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ = {}, uint32_t indexCount_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, VULKAN_HPP_NAMESPACE::Buffer transformData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: vertexData( vertexData_ ), vertexOffset( vertexOffset_ ), vertexCount( vertexCount_ ), vertexStride( vertexStride_ ), vertexFormat( vertexFormat_ ), indexData( indexData_ ), indexOffset( indexOffset_ ), indexCount( indexCount_ ), indexType( indexType_ ), transformData( transformData_ ), transformOffset( transformOffset_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: GeometryTrianglesNV( *reinterpret_cast<GeometryTrianglesNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
GeometryTrianglesNV & operator=( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
GeometryTrianglesNV & operator=( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexData( VULKAN_HPP_NAMESPACE::Buffer vertexData_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
vertexData = vertexData_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexOffset( VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
vertexOffset = vertexOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
vertexCount = vertexCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
vertexStride = vertexStride_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
vertexFormat = vertexFormat_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexData( VULKAN_HPP_NAMESPACE::Buffer indexData_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
indexData = indexData_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
indexOffset = indexOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
indexCount = indexCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
indexType = indexType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setTransformData( VULKAN_HPP_NAMESPACE::Buffer transformData_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
transformData = transformData_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setTransformOffset( VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
transformOffset = transformOffset_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkGeometryTrianglesNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkGeometryTrianglesNV*>( this );
|
|
}
|
|
|
|
explicit operator VkGeometryTrianglesNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkGeometryTrianglesNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, VULKAN_HPP_NAMESPACE::IndexType const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, vertexData, vertexOffset, vertexCount, vertexStride, vertexFormat, indexData, indexOffset, indexCount, indexType, transformData, transformOffset );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( GeometryTrianglesNV const & ) const = default;
|
|
#else
|
|
bool operator==( GeometryTrianglesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( vertexData == rhs.vertexData )
|
|
&& ( vertexOffset == rhs.vertexOffset )
|
|
&& ( vertexCount == rhs.vertexCount )
|
|
&& ( vertexStride == rhs.vertexStride )
|
|
&& ( vertexFormat == rhs.vertexFormat )
|
|
&& ( indexData == rhs.indexData )
|
|
&& ( indexOffset == rhs.indexOffset )
|
|
&& ( indexCount == rhs.indexCount )
|
|
&& ( indexType == rhs.indexType )
|
|
&& ( transformData == rhs.transformData )
|
|
&& ( transformOffset == rhs.transformOffset );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( GeometryTrianglesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryTrianglesNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Buffer vertexData = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset = {};
|
|
uint32_t vertexCount = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {};
|
|
VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::Buffer indexData = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize indexOffset = {};
|
|
uint32_t indexCount = {};
|
|
VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
|
|
VULKAN_HPP_NAMESPACE::Buffer transformData = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize transformOffset = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV ) == sizeof( VkGeometryTrianglesNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV>::value, "GeometryTrianglesNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eGeometryTrianglesNV>
|
|
{
|
|
using Type = GeometryTrianglesNV;
|
|
};
|
|
|
|
  struct GeometryAABBNV
  {
    using NativeType = VkGeometryAABBNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryAabbNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR GeometryAABBNV(VULKAN_HPP_NAMESPACE::Buffer aabbData_ = {}, uint32_t numAABBs_ = {}, uint32_t stride_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}) VULKAN_HPP_NOEXCEPT
    : aabbData( aabbData_ ), numAABBs( numAABBs_ ), stride( stride_ ), offset( offset_ )
    {}

    VULKAN_HPP_CONSTEXPR GeometryAABBNV( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    GeometryAABBNV( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : GeometryAABBNV( *reinterpret_cast<GeometryAABBNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    GeometryAABBNV & operator=( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    GeometryAABBNV & operator=( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryAABBNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setAabbData( VULKAN_HPP_NAMESPACE::Buffer aabbData_ ) VULKAN_HPP_NOEXCEPT
    {
      aabbData = aabbData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setNumAABBs( uint32_t numAABBs_ ) VULKAN_HPP_NOEXCEPT
    {
      numAABBs = numAABBs_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
    {
      stride = stride_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkGeometryAABBNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkGeometryAABBNV*>( this );
    }

    explicit operator VkGeometryAABBNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkGeometryAABBNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, aabbData, numAABBs, stride, offset );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( GeometryAABBNV const & ) const = default;
#else
    bool operator==( GeometryAABBNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( aabbData == rhs.aabbData )
          && ( numAABBs == rhs.numAABBs )
          && ( stride == rhs.stride )
          && ( offset == rhs.offset );
#endif
    }

    bool operator!=( GeometryAABBNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryAabbNV;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::Buffer aabbData = {};
    uint32_t numAABBs = {};
    uint32_t stride = {};
    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryAABBNV ) == sizeof( VkGeometryAABBNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeometryAABBNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeometryAABBNV>::value, "GeometryAABBNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eGeometryAabbNV>
  {
    using Type = GeometryAABBNV;
  };
  struct GeometryDataNV
  {
    using NativeType = VkGeometryDataNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR GeometryDataNV(VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles_ = {}, VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ = {}) VULKAN_HPP_NOEXCEPT
    : triangles( triangles_ ), aabbs( aabbs_ )
    {}

    VULKAN_HPP_CONSTEXPR GeometryDataNV( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    GeometryDataNV( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : GeometryDataNV( *reinterpret_cast<GeometryDataNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    GeometryDataNV & operator=( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    GeometryDataNV & operator=( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryDataNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 GeometryDataNV & setTriangles( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const & triangles_ ) VULKAN_HPP_NOEXCEPT
    {
      triangles = triangles_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeometryDataNV & setAabbs( VULKAN_HPP_NAMESPACE::GeometryAABBNV const & aabbs_ ) VULKAN_HPP_NOEXCEPT
    {
      aabbs = aabbs_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkGeometryDataNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkGeometryDataNV*>( this );
    }

    explicit operator VkGeometryDataNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkGeometryDataNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const &, VULKAN_HPP_NAMESPACE::GeometryAABBNV const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( triangles, aabbs );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( GeometryDataNV const & ) const = default;
#else
    bool operator==( GeometryDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( triangles == rhs.triangles )
          && ( aabbs == rhs.aabbs );
#endif
    }

    bool operator!=( GeometryDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles = {};
    VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryDataNV ) == sizeof( VkGeometryDataNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeometryDataNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeometryDataNV>::value, "GeometryDataNV is not nothrow_move_constructible!" );
  struct GeometryNV
  {
    using NativeType = VkGeometryNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR GeometryNV(VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ = {}, VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {}) VULKAN_HPP_NOEXCEPT
    : geometryType( geometryType_ ), geometry( geometry_ ), flags( flags_ )
    {}

    VULKAN_HPP_CONSTEXPR GeometryNV( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    GeometryNV( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : GeometryNV( *reinterpret_cast<GeometryNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    GeometryNV & operator=( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    GeometryNV & operator=( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 GeometryNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeometryNV & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT
    {
      geometryType = geometryType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeometryNV & setGeometry( VULKAN_HPP_NAMESPACE::GeometryDataNV const & geometry_ ) VULKAN_HPP_NOEXCEPT
    {
      geometry = geometry_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 GeometryNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkGeometryNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkGeometryNV*>( this );
    }

    explicit operator VkGeometryNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkGeometryNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::GeometryTypeKHR const &, VULKAN_HPP_NAMESPACE::GeometryDataNV const &, VULKAN_HPP_NAMESPACE::GeometryFlagsKHR const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, geometryType, geometry, flags );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( GeometryNV const & ) const = default;
#else
    bool operator==( GeometryNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( geometryType == rhs.geometryType )
          && ( geometry == rhs.geometry )
          && ( flags == rhs.flags );
#endif
    }

    bool operator!=( GeometryNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryNV;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles;
    VULKAN_HPP_NAMESPACE::GeometryDataNV geometry = {};
    VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryNV ) == sizeof( VkGeometryNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeometryNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeometryNV>::value, "GeometryNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eGeometryNV>
  {
    using Type = GeometryNV;
  };
  struct AccelerationStructureInfoNV
  {
    using NativeType = VkAccelerationStructureInfoNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV(VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ = {}, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ = {}, uint32_t instanceCount_ = {}, uint32_t geometryCount_ = {}, const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries_ = {}) VULKAN_HPP_NOEXCEPT
    : type( type_ ), flags( flags_ ), instanceCount( instanceCount_ ), geometryCount( geometryCount_ ), pGeometries( pGeometries_ )
    {}

    VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureInfoNV( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureInfoNV( *reinterpret_cast<AccelerationStructureInfoNV const *>( &rhs ) )
    {}

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_, uint32_t instanceCount_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GeometryNV> const & geometries_ )
    : type( type_ ), flags( flags_ ), instanceCount( instanceCount_ ), geometryCount( static_cast<uint32_t>( geometries_.size() ) ), pGeometries( geometries_.data() )
    {}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    AccelerationStructureInfoNV & operator=( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureInfoNV & operator=( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
    {
      instanceCount = instanceCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      geometryCount = geometryCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setPGeometries( const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries_ ) VULKAN_HPP_NOEXCEPT
    {
      pGeometries = pGeometries_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    AccelerationStructureInfoNV & setGeometries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GeometryNV> const & geometries_ ) VULKAN_HPP_NOEXCEPT
    {
      geometryCount = static_cast<uint32_t>( geometries_.size() );
      pGeometries = geometries_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkAccelerationStructureInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureInfoNV*>( this );
    }

    explicit operator VkAccelerationStructureInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureInfoNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV const &, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::GeometryNV * const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, type, flags, instanceCount, geometryCount, pGeometries );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( AccelerationStructureInfoNV const & ) const = default;
#else
    bool operator==( AccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( type == rhs.type )
          && ( flags == rhs.flags )
          && ( instanceCount == rhs.instanceCount )
          && ( geometryCount == rhs.geometryCount )
          && ( pGeometries == rhs.pGeometries );
#endif
    }

    bool operator!=( AccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureInfoNV;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type = {};
    VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags = {};
    uint32_t instanceCount = {};
    uint32_t geometryCount = {};
    const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV ) == sizeof( VkAccelerationStructureInfoNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV>::value, "AccelerationStructureInfoNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureInfoNV>
  {
    using Type = AccelerationStructureInfoNV;
  };
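  // Illustrative sketch, not generated from the registry: building an
  // AccelerationStructureInfoNV for a bottom-level structure from a caller-owned
  // vector of GeometryNV. The struct only stores the pointer/count pair, so the
  // vector must outlive any create/build call that consumes this info. setGeometries
  // is only available when VULKAN_HPP_DISABLE_ENHANCED_MODE is not defined.
  //
  //   std::vector<VULKAN_HPP_NAMESPACE::GeometryNV> geometries = { /* placeholder geometry */ };
  //   VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV asInfo;
  //   asInfo.setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV::eBottomLevel )
  //         .setGeometries( geometries );   // fills geometryCount and pGeometries together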
  struct AccelerationStructureCreateInfoNV
  {
    using NativeType = VkAccelerationStructureCreateInfoNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV(VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ = {}) VULKAN_HPP_NOEXCEPT
    : compactedSize( compactedSize_ ), info( info_ )
    {}

    VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureCreateInfoNV( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureCreateInfoNV( *reinterpret_cast<AccelerationStructureCreateInfoNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    AccelerationStructureCreateInfoNV & operator=( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureCreateInfoNV & operator=( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT
    {
      compactedSize = compactedSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & setInfo( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & info_ ) VULKAN_HPP_NOEXCEPT
    {
      info = info_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkAccelerationStructureCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureCreateInfoNV*>( this );
    }

    explicit operator VkAccelerationStructureCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureCreateInfoNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, compactedSize, info );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( AccelerationStructureCreateInfoNV const & ) const = default;
#else
    bool operator==( AccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( compactedSize == rhs.compactedSize )
          && ( info == rhs.info );
#endif
    }

    bool operator!=( AccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoNV;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceSize compactedSize = {};
    VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV ) == sizeof( VkAccelerationStructureCreateInfoNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV>::value, "AccelerationStructureCreateInfoNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureCreateInfoNV>
  {
    using Type = AccelerationStructureCreateInfoNV;
  };
  struct AccelerationStructureDeviceAddressInfoKHR
  {
    using NativeType = VkAccelerationStructureDeviceAddressInfoKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureDeviceAddressInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {}) VULKAN_HPP_NOEXCEPT
    : accelerationStructure( accelerationStructure_ )
    {}

    VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureDeviceAddressInfoKHR( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureDeviceAddressInfoKHR( *reinterpret_cast<AccelerationStructureDeviceAddressInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    AccelerationStructureDeviceAddressInfoKHR & operator=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureDeviceAddressInfoKHR & operator=( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructure = accelerationStructure_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkAccelerationStructureDeviceAddressInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR*>( this );
    }

    explicit operator VkAccelerationStructureDeviceAddressInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureDeviceAddressInfoKHR*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, accelerationStructure );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( AccelerationStructureDeviceAddressInfoKHR const & ) const = default;
#else
    bool operator==( AccelerationStructureDeviceAddressInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( accelerationStructure == rhs.accelerationStructure );
#endif
    }

    bool operator!=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureDeviceAddressInfoKHR;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR ) == sizeof( VkAccelerationStructureDeviceAddressInfoKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR>::value, "AccelerationStructureDeviceAddressInfoKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureDeviceAddressInfoKHR>
  {
    using Type = AccelerationStructureDeviceAddressInfoKHR;
  };
  struct AccelerationStructureGeometryMotionTrianglesDataNV
  {
    using NativeType = VkAccelerationStructureGeometryMotionTrianglesDataNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}) VULKAN_HPP_NOEXCEPT
    : vertexData( vertexData_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV( AccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureGeometryMotionTrianglesDataNV( VkAccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureGeometryMotionTrianglesDataNV( *reinterpret_cast<AccelerationStructureGeometryMotionTrianglesDataNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    AccelerationStructureGeometryMotionTrianglesDataNV & operator=( AccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureGeometryMotionTrianglesDataNV & operator=( VkAccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV & setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexData = vertexData_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkAccelerationStructureGeometryMotionTrianglesDataNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureGeometryMotionTrianglesDataNV*>( this );
    }

    explicit operator VkAccelerationStructureGeometryMotionTrianglesDataNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureGeometryMotionTrianglesDataNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, vertexData );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV ) == sizeof( VkAccelerationStructureGeometryMotionTrianglesDataNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV>::value, "AccelerationStructureGeometryMotionTrianglesDataNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV>
  {
    using Type = AccelerationStructureGeometryMotionTrianglesDataNV;
  };
  struct TransformMatrixKHR
  {
    using NativeType = VkTransformMatrixKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR(std::array<std::array<float,4>,3> const & matrix_ = {}) VULKAN_HPP_NOEXCEPT
    : matrix( matrix_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    TransformMatrixKHR( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : TransformMatrixKHR( *reinterpret_cast<TransformMatrixKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    TransformMatrixKHR & operator=( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    TransformMatrixKHR & operator=( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TransformMatrixKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR & setMatrix( std::array<std::array<float,4>,3> matrix_ ) VULKAN_HPP_NOEXCEPT
    {
      matrix = matrix_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkTransformMatrixKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkTransformMatrixKHR*>( this );
    }

    explicit operator VkTransformMatrixKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkTransformMatrixKHR*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::ArrayWrapper2D<float, 3, 4> const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( matrix );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( TransformMatrixKHR const & ) const = default;
#else
    bool operator==( TransformMatrixKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( matrix == rhs.matrix );
#endif
    }

    bool operator!=( TransformMatrixKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ArrayWrapper2D<float, 3, 4> matrix = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TransformMatrixKHR ) == sizeof( VkTransformMatrixKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TransformMatrixKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TransformMatrixKHR>::value, "TransformMatrixKHR is not nothrow_move_constructible!" );
  using TransformMatrixNV = TransformMatrixKHR;
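  // Illustrative sketch, not generated from the registry: TransformMatrixKHR stores a
  // row-major 3x4 matrix (three rows of rotation/scale with the translation in the
  // fourth column). An identity transform can be written through the std::array
  // constructor declared above:
  //
  //   VULKAN_HPP_NAMESPACE::TransformMatrixKHR identity( std::array<std::array<float, 4>, 3>{ {
  //     { { 1.0f, 0.0f, 0.0f, 0.0f } },
  //     { { 0.0f, 1.0f, 0.0f, 0.0f } },
  //     { { 0.0f, 0.0f, 1.0f, 0.0f } } } } );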
  struct AccelerationStructureInstanceKHR
  {
    using NativeType = VkAccelerationStructureInstanceKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR(VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform_ = {}, uint32_t instanceCustomIndex_ = {}, uint32_t mask_ = {}, uint32_t instanceShaderBindingTableRecordOffset_ = {}, VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, uint64_t accelerationStructureReference_ = {}) VULKAN_HPP_NOEXCEPT
    : transform( transform_ ), instanceCustomIndex( instanceCustomIndex_ ), mask( mask_ ), instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ ), flags( flags_ ), accelerationStructureReference( accelerationStructureReference_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureInstanceKHR( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureInstanceKHR( *reinterpret_cast<AccelerationStructureInstanceKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    AccelerationStructureInstanceKHR & operator=( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureInstanceKHR & operator=( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setTransform( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transform_ ) VULKAN_HPP_NOEXCEPT
    {
      transform = transform_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      instanceCustomIndex = instanceCustomIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT
    {
      mask = mask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_;
      return *this;
    }

    AccelerationStructureInstanceKHR & setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = *reinterpret_cast<VkGeometryInstanceFlagsKHR*>(&flags_);
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructureReference = accelerationStructureReference_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkAccelerationStructureInstanceKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureInstanceKHR*>( this );
    }

    explicit operator VkAccelerationStructureInstanceKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureInstanceKHR*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::TransformMatrixKHR const &, uint32_t const &, uint32_t const &, uint32_t const &, VkGeometryInstanceFlagsKHR const &, uint64_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( transform, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, accelerationStructureReference );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( AccelerationStructureInstanceKHR const & ) const = default;
#else
    bool operator==( AccelerationStructureInstanceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( transform == rhs.transform )
          && ( instanceCustomIndex == rhs.instanceCustomIndex )
          && ( mask == rhs.mask )
          && ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset )
          && ( flags == rhs.flags )
          && ( accelerationStructureReference == rhs.accelerationStructureReference );
#endif
    }

    bool operator!=( AccelerationStructureInstanceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform = {};
    uint32_t instanceCustomIndex : 24;
    uint32_t mask : 8;
    uint32_t instanceShaderBindingTableRecordOffset : 24;
    VkGeometryInstanceFlagsKHR flags : 8;
    uint64_t accelerationStructureReference = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR ) == sizeof( VkAccelerationStructureInstanceKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR>::value, "AccelerationStructureInstanceKHR is not nothrow_move_constructible!" );
  using AccelerationStructureInstanceNV = AccelerationStructureInstanceKHR;
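  // Illustrative sketch, not generated from the registry: filling an
  // AccelerationStructureInstanceKHR for a top-level build. instanceCustomIndex, mask
  // and instanceShaderBindingTableRecordOffset are bit-fields (24/8/24 bits wide), so
  // values are truncated to that width; accelerationStructureReference takes the
  // 64-bit device address of a bottom-level structure (blasAddress is a placeholder).
  //
  //   VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR instance;
  //   instance.setTransform( identity )   // e.g. the TransformMatrixKHR sketched above
  //           .setInstanceCustomIndex( 0 )
  //           .setMask( 0xFF )            // visible to every ray mask
  //           .setInstanceShaderBindingTableRecordOffset( 0 )
  //           .setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable )
  //           .setAccelerationStructureReference( blasAddress );   // placeholder device address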
  struct AccelerationStructureMatrixMotionInstanceNV
  {
    using NativeType = VkAccelerationStructureMatrixMotionInstanceNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV(VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT0_ = {}, VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT1_ = {}, uint32_t instanceCustomIndex_ = {}, uint32_t mask_ = {}, uint32_t instanceShaderBindingTableRecordOffset_ = {}, VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, uint64_t accelerationStructureReference_ = {}) VULKAN_HPP_NOEXCEPT
    : transformT0( transformT0_ ), transformT1( transformT1_ ), instanceCustomIndex( instanceCustomIndex_ ), mask( mask_ ), instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ ), flags( flags_ ), accelerationStructureReference( accelerationStructureReference_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV( AccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureMatrixMotionInstanceNV( VkAccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureMatrixMotionInstanceNV( *reinterpret_cast<AccelerationStructureMatrixMotionInstanceNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    AccelerationStructureMatrixMotionInstanceNV & operator=( AccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureMatrixMotionInstanceNV & operator=( VkAccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setTransformT0( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transformT0_ ) VULKAN_HPP_NOEXCEPT
    {
      transformT0 = transformT0_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setTransformT1( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transformT1_ ) VULKAN_HPP_NOEXCEPT
    {
      transformT1 = transformT1_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      instanceCustomIndex = instanceCustomIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT
    {
      mask = mask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_;
      return *this;
    }

    AccelerationStructureMatrixMotionInstanceNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = *reinterpret_cast<VkGeometryInstanceFlagsKHR*>(&flags_);
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructureReference = accelerationStructureReference_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkAccelerationStructureMatrixMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureMatrixMotionInstanceNV*>( this );
    }

    explicit operator VkAccelerationStructureMatrixMotionInstanceNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureMatrixMotionInstanceNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::TransformMatrixKHR const &, VULKAN_HPP_NAMESPACE::TransformMatrixKHR const &, uint32_t const &, uint32_t const &, uint32_t const &, VkGeometryInstanceFlagsKHR const &, uint64_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( transformT0, transformT1, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, accelerationStructureReference );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( AccelerationStructureMatrixMotionInstanceNV const & ) const = default;
#else
    bool operator==( AccelerationStructureMatrixMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( transformT0 == rhs.transformT0 )
          && ( transformT1 == rhs.transformT1 )
          && ( instanceCustomIndex == rhs.instanceCustomIndex )
          && ( mask == rhs.mask )
          && ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset )
          && ( flags == rhs.flags )
          && ( accelerationStructureReference == rhs.accelerationStructureReference );
#endif
    }

    bool operator!=( AccelerationStructureMatrixMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT0 = {};
    VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT1 = {};
    uint32_t instanceCustomIndex : 24;
    uint32_t mask : 8;
    uint32_t instanceShaderBindingTableRecordOffset : 24;
    VkGeometryInstanceFlagsKHR flags : 8;
    uint64_t accelerationStructureReference = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV ) == sizeof( VkAccelerationStructureMatrixMotionInstanceNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV>::value, "AccelerationStructureMatrixMotionInstanceNV is not nothrow_move_constructible!" );
struct AccelerationStructureMemoryRequirementsInfoNV
|
|
{
|
|
using NativeType = VkAccelerationStructureMemoryRequirementsInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV(VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject, VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: type( type_ ), accelerationStructure( accelerationStructure_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: AccelerationStructureMemoryRequirementsInfoNV( *reinterpret_cast<AccelerationStructureMemoryRequirementsInfoNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AccelerationStructureMemoryRequirementsInfoNV & operator=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AccelerationStructureMemoryRequirementsInfoNV & operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
type = type_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
accelerationStructure = accelerationStructure_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkAccelerationStructureMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkAccelerationStructureMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAccelerationStructureMemoryRequirementsInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV const &, VULKAN_HPP_NAMESPACE::AccelerationStructureNV const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, type, accelerationStructure );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( AccelerationStructureMemoryRequirementsInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( type == rhs.type )
|
|
&& ( accelerationStructure == rhs.accelerationStructure );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject;
|
|
VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV ) == sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV>::value, "AccelerationStructureMemoryRequirementsInfoNV is not nothrow_move_constructible!" );
|
|
|
|
  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureMemoryRequirementsInfoNV>
  {
    using Type = AccelerationStructureMemoryRequirementsInfoNV;
  };
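
  // Illustrative usage sketch, not generated from the registry: querying the memory requirements
  // of an NV ray tracing acceleration structure. Assumes valid 'device' and 'accelerationStructure'
  // handles and the Device::getAccelerationStructureMemoryRequirementsNV wrapper from the
  // companion vulkan_funcs.hpp.
  //
  //   VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV memReqInfo;
  //   memReqInfo.setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject )
  //             .setAccelerationStructure( accelerationStructure );
  //   VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memReq =
  //     device.getAccelerationStructureMemoryRequirementsNV( memReqInfo );
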
struct AccelerationStructureMotionInfoNV
|
|
{
|
|
using NativeType = VkAccelerationStructureMotionInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMotionInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR AccelerationStructureMotionInfoNV(uint32_t maxInstances_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: maxInstances( maxInstances_ ), flags( flags_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR AccelerationStructureMotionInfoNV( AccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AccelerationStructureMotionInfoNV( VkAccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: AccelerationStructureMotionInfoNV( *reinterpret_cast<AccelerationStructureMotionInfoNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AccelerationStructureMotionInfoNV & operator=( AccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AccelerationStructureMotionInfoNV & operator=( VkAccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & setMaxInstances( uint32_t maxInstances_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxInstances = maxInstances_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & setFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkAccelerationStructureMotionInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAccelerationStructureMotionInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkAccelerationStructureMotionInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAccelerationStructureMotionInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maxInstances, flags );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( AccelerationStructureMotionInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( AccelerationStructureMotionInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( maxInstances == rhs.maxInstances )
|
|
&& ( flags == rhs.flags );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( AccelerationStructureMotionInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMotionInfoNV;
|
|
const void * pNext = {};
|
|
uint32_t maxInstances = {};
|
|
VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV ) == sizeof( VkAccelerationStructureMotionInfoNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV>::value, "AccelerationStructureMotionInfoNV is not nothrow_move_constructible!" );
|
|
|
|
  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureMotionInfoNV>
  {
    using Type = AccelerationStructureMotionInfoNV;
  };
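
  // Illustrative usage sketch, not generated from the registry: with VK_NV_ray_tracing_motion_blur,
  // this struct is chained into AccelerationStructureCreateInfoKHR::pNext to declare how many motion
  // instances a top-level acceleration structure may hold. 'createInfo' is assumed to be an otherwise
  // fully initialized AccelerationStructureCreateInfoKHR whose flags are expected to include the
  // motion create bit (AccelerationStructureCreateFlagBitsKHR::eMotionNV).
  //
  //   VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV motionInfo;
  //   motionInfo.setMaxInstances( 1024 );
  //   createInfo.setPNext( &motionInfo );
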
struct SRTDataNV
|
|
{
|
|
using NativeType = VkSRTDataNV;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SRTDataNV(float sx_ = {}, float a_ = {}, float b_ = {}, float pvx_ = {}, float sy_ = {}, float c_ = {}, float pvy_ = {}, float sz_ = {}, float pvz_ = {}, float qx_ = {}, float qy_ = {}, float qz_ = {}, float qw_ = {}, float tx_ = {}, float ty_ = {}, float tz_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: sx( sx_ ), a( a_ ), b( b_ ), pvx( pvx_ ), sy( sy_ ), c( c_ ), pvy( pvy_ ), sz( sz_ ), pvz( pvz_ ), qx( qx_ ), qy( qy_ ), qz( qz_ ), qw( qw_ ), tx( tx_ ), ty( ty_ ), tz( tz_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SRTDataNV( SRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SRTDataNV( VkSRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SRTDataNV( *reinterpret_cast<SRTDataNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SRTDataNV & operator=( SRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SRTDataNV & operator=( VkSRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SRTDataNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setSx( float sx_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sx = sx_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setA( float a_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
a = a_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setB( float b_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
b = b_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setPvx( float pvx_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pvx = pvx_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setSy( float sy_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sy = sy_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setC( float c_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
c = c_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setPvy( float pvy_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pvy = pvy_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setSz( float sz_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sz = sz_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setPvz( float pvz_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pvz = pvz_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQx( float qx_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
qx = qx_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQy( float qy_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
qy = qy_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQz( float qz_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
qz = qz_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQw( float qw_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
qw = qw_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setTx( float tx_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
tx = tx_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setTy( float ty_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
ty = ty_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setTz( float tz_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
tz = tz_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSRTDataNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSRTDataNV*>( this );
|
|
}
|
|
|
|
explicit operator VkSRTDataNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSRTDataNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<float const &, float const &, float const &, float const &, float const &, float const &, float const &, float const &, float const &, float const &, float const &, float const &, float const &, float const &, float const &, float const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sx, a, b, pvx, sy, c, pvy, sz, pvz, qx, qy, qz, qw, tx, ty, tz );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SRTDataNV const & ) const = default;
|
|
#else
|
|
bool operator==( SRTDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sx == rhs.sx )
|
|
&& ( a == rhs.a )
|
|
&& ( b == rhs.b )
|
|
&& ( pvx == rhs.pvx )
|
|
&& ( sy == rhs.sy )
|
|
&& ( c == rhs.c )
|
|
&& ( pvy == rhs.pvy )
|
|
&& ( sz == rhs.sz )
|
|
&& ( pvz == rhs.pvz )
|
|
&& ( qx == rhs.qx )
|
|
&& ( qy == rhs.qy )
|
|
&& ( qz == rhs.qz )
|
|
&& ( qw == rhs.qw )
|
|
&& ( tx == rhs.tx )
|
|
&& ( ty == rhs.ty )
|
|
&& ( tz == rhs.tz );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SRTDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
float sx = {};
|
|
float a = {};
|
|
float b = {};
|
|
float pvx = {};
|
|
float sy = {};
|
|
float c = {};
|
|
float pvy = {};
|
|
float sz = {};
|
|
float pvz = {};
|
|
float qx = {};
|
|
float qy = {};
|
|
float qz = {};
|
|
float qw = {};
|
|
float tx = {};
|
|
float ty = {};
|
|
float tz = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SRTDataNV ) == sizeof( VkSRTDataNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SRTDataNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SRTDataNV>::value, "SRTDataNV is not nothrow_move_constructible!" );
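
  // Illustrative usage sketch, not generated from the registry: an SRTDataNV describing an identity
  // transform, i.e. unit scale, identity rotation quaternion ( qw == 1 ) and zero shear, pivot and
  // translation; all components not set explicitly keep their zero defaults.
  //
  //   VULKAN_HPP_NAMESPACE::SRTDataNV identitySrt;
  //   identitySrt.setSx( 1.0f ).setSy( 1.0f ).setSz( 1.0f ).setQw( 1.0f );
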
struct AccelerationStructureSRTMotionInstanceNV
|
|
{
|
|
using NativeType = VkAccelerationStructureSRTMotionInstanceNV;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR AccelerationStructureSRTMotionInstanceNV(VULKAN_HPP_NAMESPACE::SRTDataNV transformT0_ = {}, VULKAN_HPP_NAMESPACE::SRTDataNV transformT1_ = {}, uint32_t instanceCustomIndex_ = {}, uint32_t mask_ = {}, uint32_t instanceShaderBindingTableRecordOffset_ = {}, VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, uint64_t accelerationStructureReference_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: transformT0( transformT0_ ), transformT1( transformT1_ ), instanceCustomIndex( instanceCustomIndex_ ), mask( mask_ ), instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ ), flags( flags_ ), accelerationStructureReference( accelerationStructureReference_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR AccelerationStructureSRTMotionInstanceNV( AccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AccelerationStructureSRTMotionInstanceNV( VkAccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: AccelerationStructureSRTMotionInstanceNV( *reinterpret_cast<AccelerationStructureSRTMotionInstanceNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AccelerationStructureSRTMotionInstanceNV & operator=( AccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AccelerationStructureSRTMotionInstanceNV & operator=( VkAccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setTransformT0( VULKAN_HPP_NAMESPACE::SRTDataNV const & transformT0_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
transformT0 = transformT0_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setTransformT1( VULKAN_HPP_NAMESPACE::SRTDataNV const & transformT1_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
transformT1 = transformT1_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
instanceCustomIndex = instanceCustomIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
mask = mask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_;
|
|
return *this;
|
|
}
|
|
|
|
AccelerationStructureSRTMotionInstanceNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = *reinterpret_cast<VkGeometryInstanceFlagsKHR*>(&flags_);
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
accelerationStructureReference = accelerationStructureReference_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkAccelerationStructureSRTMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAccelerationStructureSRTMotionInstanceNV*>( this );
|
|
}
|
|
|
|
explicit operator VkAccelerationStructureSRTMotionInstanceNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAccelerationStructureSRTMotionInstanceNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::SRTDataNV const &, VULKAN_HPP_NAMESPACE::SRTDataNV const &, uint32_t const &, uint32_t const &, uint32_t const &, VkGeometryInstanceFlagsKHR const &, uint64_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( transformT0, transformT1, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, accelerationStructureReference );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( AccelerationStructureSRTMotionInstanceNV const & ) const = default;
|
|
#else
|
|
bool operator==( AccelerationStructureSRTMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( transformT0 == rhs.transformT0 )
|
|
&& ( transformT1 == rhs.transformT1 )
|
|
&& ( instanceCustomIndex == rhs.instanceCustomIndex )
|
|
&& ( mask == rhs.mask )
|
|
&& ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset )
|
|
&& ( flags == rhs.flags )
|
|
&& ( accelerationStructureReference == rhs.accelerationStructureReference );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( AccelerationStructureSRTMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::SRTDataNV transformT0 = {};
|
|
VULKAN_HPP_NAMESPACE::SRTDataNV transformT1 = {};
|
|
uint32_t instanceCustomIndex : 24;
|
|
uint32_t mask : 8;
|
|
uint32_t instanceShaderBindingTableRecordOffset : 24;
|
|
VkGeometryInstanceFlagsKHR flags : 8;
|
|
uint64_t accelerationStructureReference = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV ) == sizeof( VkAccelerationStructureSRTMotionInstanceNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV>::value, "AccelerationStructureSRTMotionInstanceNV is not nothrow_move_constructible!" );
union AccelerationStructureMotionInstanceDataNV
|
|
{
|
|
using NativeType = VkAccelerationStructureMotionInstanceDataNV;
|
|
#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR staticInstance_ = {} )
|
|
: staticInstance( staticInstance_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV matrixMotionInstance_ )
|
|
: matrixMotionInstance( matrixMotionInstance_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV srtMotionInstance_ )
|
|
: srtMotionInstance( srtMotionInstance_ )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
|
|
|
|
#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & setStaticInstance( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR const & staticInstance_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
staticInstance = staticInstance_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & setMatrixMotionInstance( VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV const & matrixMotionInstance_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
matrixMotionInstance = matrixMotionInstance_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & setSrtMotionInstance( VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV const & srtMotionInstance_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srtMotionInstance = srtMotionInstance_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_UNION_SETTERS*/
|
|
|
|
operator VkAccelerationStructureMotionInstanceDataNV const &() const
|
|
{
|
|
return *reinterpret_cast<const VkAccelerationStructureMotionInstanceDataNV*>( this );
|
|
}
|
|
|
|
operator VkAccelerationStructureMotionInstanceDataNV &()
|
|
{
|
|
return *reinterpret_cast<VkAccelerationStructureMotionInstanceDataNV*>( this );
|
|
}
|
|
|
|
#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
|
|
VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR staticInstance;
|
|
VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV matrixMotionInstance;
|
|
VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV srtMotionInstance;
|
|
#else
|
|
VkAccelerationStructureInstanceKHR staticInstance;
|
|
VkAccelerationStructureMatrixMotionInstanceNV matrixMotionInstance;
|
|
VkAccelerationStructureSRTMotionInstanceNV srtMotionInstance;
|
|
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
|
|
|
|
};
struct AccelerationStructureMotionInstanceNV
|
|
{
|
|
using NativeType = VkAccelerationStructureMotionInstanceNV;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV(VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV::eStatic, VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV data_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: type( type_ ), flags( flags_ ), data( data_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV( AccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AccelerationStructureMotionInstanceNV( VkAccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: AccelerationStructureMotionInstanceNV( *reinterpret_cast<AccelerationStructureMotionInstanceNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AccelerationStructureMotionInstanceNV & operator=( AccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AccelerationStructureMotionInstanceNV & operator=( VkAccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
type = type_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV & setFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV & setData( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV const & data_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
data = data_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkAccelerationStructureMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAccelerationStructureMotionInstanceNV*>( this );
|
|
}
|
|
|
|
explicit operator VkAccelerationStructureMotionInstanceNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAccelerationStructureMotionInstanceNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV const &, VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV const &, VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( type, flags, data );
|
|
}
|
|
#endif
|
|
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type = VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV::eStatic;
|
|
VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags = {};
|
|
VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV data = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV ) == sizeof( VkAccelerationStructureMotionInstanceNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV>::value, "AccelerationStructureMotionInstanceNV is not nothrow_move_constructible!" );
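
  // Illustrative usage sketch, not generated from the registry: building an SRT-interpolated motion
  // instance from two SRTDataNV keyframes. 'srtT0', 'srtT1' and 'blasReference' (the 64-bit device
  // address of the referenced bottom-level acceleration structure) are assumed to be set up elsewhere.
  //
  //   VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV srtInstance;
  //   srtInstance.setTransformT0( srtT0 )
  //              .setTransformT1( srtT1 )
  //              .setMask( 0xFF )
  //              .setAccelerationStructureReference( blasReference );
  //
  //   VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV motionInstance(
  //     VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV::eSrtMotion,
  //     {},
  //     VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV( srtInstance ) );
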
struct AccelerationStructureVersionInfoKHR
|
|
{
|
|
using NativeType = VkAccelerationStructureVersionInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureVersionInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR(const uint8_t * pVersionData_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: pVersionData( pVersionData_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR( AccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AccelerationStructureVersionInfoKHR( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: AccelerationStructureVersionInfoKHR( *reinterpret_cast<AccelerationStructureVersionInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AccelerationStructureVersionInfoKHR & operator=( AccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AccelerationStructureVersionInfoKHR & operator=( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureVersionInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureVersionInfoKHR & setPVersionData( const uint8_t * pVersionData_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pVersionData = pVersionData_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkAccelerationStructureVersionInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAccelerationStructureVersionInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkAccelerationStructureVersionInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAccelerationStructureVersionInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const uint8_t * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pVersionData );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( AccelerationStructureVersionInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( AccelerationStructureVersionInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( pVersionData == rhs.pVersionData );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( AccelerationStructureVersionInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureVersionInfoKHR;
|
|
const void * pNext = {};
|
|
const uint8_t * pVersionData = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR ) == sizeof( VkAccelerationStructureVersionInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR>::value, "AccelerationStructureVersionInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureVersionInfoKHR>
  {
    using Type = AccelerationStructureVersionInfoKHR;
  };
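
  // Illustrative usage sketch, not generated from the registry: checking whether serialized
  // acceleration structure data is compatible with the current device. pVersionData must point to
  // 2 * VK_UUID_SIZE bytes of version data taken from the serialized blob; the
  // Device::getAccelerationStructureCompatibilityKHR wrapper from vulkan_funcs.hpp is assumed.
  //
  //   VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR versionInfo;
  //   versionInfo.setPVersionData( serializedVersionHeader );  // const uint8_t *, 2 * VK_UUID_SIZE bytes
  //   VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility =
  //     device.getAccelerationStructureCompatibilityKHR( versionInfo );
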
struct AcquireNextImageInfoKHR
|
|
{
|
|
using NativeType = VkAcquireNextImageInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireNextImageInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, uint64_t timeout_ = {}, VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::Fence fence_ = {}, uint32_t deviceMask_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: swapchain( swapchain_ ), timeout( timeout_ ), semaphore( semaphore_ ), fence( fence_ ), deviceMask( deviceMask_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AcquireNextImageInfoKHR( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: AcquireNextImageInfoKHR( *reinterpret_cast<AcquireNextImageInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AcquireNextImageInfoKHR & operator=( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AcquireNextImageInfoKHR & operator=( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
swapchain = swapchain_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
timeout = timeout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
semaphore = semaphore_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fence = fence_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceMask = deviceMask_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkAcquireNextImageInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAcquireNextImageInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkAcquireNextImageInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAcquireNextImageInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SwapchainKHR const &, uint64_t const &, VULKAN_HPP_NAMESPACE::Semaphore const &, VULKAN_HPP_NAMESPACE::Fence const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, swapchain, timeout, semaphore, fence, deviceMask );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( AcquireNextImageInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( AcquireNextImageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( swapchain == rhs.swapchain )
|
|
&& ( timeout == rhs.timeout )
|
|
&& ( semaphore == rhs.semaphore )
|
|
&& ( fence == rhs.fence )
|
|
&& ( deviceMask == rhs.deviceMask );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( AcquireNextImageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireNextImageInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
|
|
uint64_t timeout = {};
|
|
VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
|
|
VULKAN_HPP_NAMESPACE::Fence fence = {};
|
|
uint32_t deviceMask = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR ) == sizeof( VkAcquireNextImageInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR>::value, "AcquireNextImageInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
  template <>
  struct CppType<StructureType, StructureType::eAcquireNextImageInfoKHR>
  {
    using Type = AcquireNextImageInfoKHR;
  };
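
  // Illustrative usage sketch, not generated from the registry: acquiring the next swapchain image
  // on a device-group aware code path. Assumes valid 'device', 'swapchain' and 'semaphore' handles,
  // the default exception-enabled configuration, and the Device::acquireNextImage2KHR wrapper from
  // vulkan_funcs.hpp.
  //
  //   VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR acquireInfo;
  //   acquireInfo.setSwapchain( swapchain )
  //              .setTimeout( UINT64_MAX )
  //              .setSemaphore( semaphore )
  //              .setDeviceMask( 1 );  // physical device 0 of the device group
  //   uint32_t imageIndex = device.acquireNextImage2KHR( acquireInfo ).value;
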
struct AcquireProfilingLockInfoKHR
|
|
{
|
|
using NativeType = VkAcquireProfilingLockInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireProfilingLockInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR(VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ = {}, uint64_t timeout_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), timeout( timeout_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AcquireProfilingLockInfoKHR( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: AcquireProfilingLockInfoKHR( *reinterpret_cast<AcquireProfilingLockInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AcquireProfilingLockInfoKHR & operator=( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AcquireProfilingLockInfoKHR & operator=( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
timeout = timeout_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkAcquireProfilingLockInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAcquireProfilingLockInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkAcquireProfilingLockInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAcquireProfilingLockInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR const &, uint64_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, timeout );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( AcquireProfilingLockInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( AcquireProfilingLockInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( timeout == rhs.timeout );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( AcquireProfilingLockInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireProfilingLockInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags = {};
|
|
uint64_t timeout = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR ) == sizeof( VkAcquireProfilingLockInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR>::value, "AcquireProfilingLockInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
  template <>
  struct CppType<StructureType, StructureType::eAcquireProfilingLockInfoKHR>
  {
    using Type = AcquireProfilingLockInfoKHR;
  };
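
  // Illustrative usage sketch, not generated from the registry: taking the profiling lock required
  // by VK_KHR_performance_query before recording command buffers that use performance query pools.
  // Assumes the Device::acquireProfilingLockKHR / releaseProfilingLockKHR wrappers from
  // vulkan_funcs.hpp.
  //
  //   VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR lockInfo;
  //   lockInfo.setTimeout( UINT64_MAX );  // block until the lock is granted
  //   device.acquireProfilingLockKHR( lockInfo );
  //   // ... record, submit and read back performance queries ...
  //   device.releaseProfilingLockKHR();
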
struct AllocationCallbacks
|
|
{
|
|
using NativeType = VkAllocationCallbacks;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR AllocationCallbacks(void * pUserData_ = {}, PFN_vkAllocationFunction pfnAllocation_ = {}, PFN_vkReallocationFunction pfnReallocation_ = {}, PFN_vkFreeFunction pfnFree_ = {}, PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {}, PFN_vkInternalFreeNotification pfnInternalFree_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: pUserData( pUserData_ ), pfnAllocation( pfnAllocation_ ), pfnReallocation( pfnReallocation_ ), pfnFree( pfnFree_ ), pfnInternalAllocation( pfnInternalAllocation_ ), pfnInternalFree( pfnInternalFree_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR AllocationCallbacks( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AllocationCallbacks( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: AllocationCallbacks( *reinterpret_cast<AllocationCallbacks const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AllocationCallbacks & operator=( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AllocationCallbacks & operator=( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AllocationCallbacks const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pUserData = pUserData_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pfnAllocation = pfnAllocation_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pfnReallocation = pfnReallocation_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnFree( PFN_vkFreeFunction pfnFree_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pfnFree = pfnFree_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pfnInternalAllocation = pfnInternalAllocation_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pfnInternalFree = pfnInternalFree_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkAllocationCallbacks const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAllocationCallbacks*>( this );
|
|
}
|
|
|
|
explicit operator VkAllocationCallbacks &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAllocationCallbacks*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<void * const &, PFN_vkAllocationFunction const &, PFN_vkReallocationFunction const &, PFN_vkFreeFunction const &, PFN_vkInternalAllocationNotification const &, PFN_vkInternalFreeNotification const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( pUserData, pfnAllocation, pfnReallocation, pfnFree, pfnInternalAllocation, pfnInternalFree );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( AllocationCallbacks const & ) const = default;
|
|
#else
|
|
bool operator==( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( pUserData == rhs.pUserData )
|
|
&& ( pfnAllocation == rhs.pfnAllocation )
|
|
&& ( pfnReallocation == rhs.pfnReallocation )
|
|
&& ( pfnFree == rhs.pfnFree )
|
|
&& ( pfnInternalAllocation == rhs.pfnInternalAllocation )
|
|
&& ( pfnInternalFree == rhs.pfnInternalFree );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
void * pUserData = {};
|
|
PFN_vkAllocationFunction pfnAllocation = {};
|
|
PFN_vkReallocationFunction pfnReallocation = {};
|
|
PFN_vkFreeFunction pfnFree = {};
|
|
PFN_vkInternalAllocationNotification pfnInternalAllocation = {};
|
|
PFN_vkInternalFreeNotification pfnInternalFree = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AllocationCallbacks>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AllocationCallbacks>::value, "AllocationCallbacks is not nothrow_move_constructible!" );
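
  // Illustrative usage sketch, not generated from the registry: routing host allocations of a Vulkan
  // object through user supplied callbacks. 'myAlloc', 'myRealloc' and 'myFree' are hypothetical
  // functions that must match the PFN_vkAllocationFunction, PFN_vkReallocationFunction and
  // PFN_vkFreeFunction signatures (honouring the requested alignment); 'myTracker' is an arbitrary
  // user pointer handed back to every callback. Assumes the default exception-enabled configuration.
  //
  //   VULKAN_HPP_NAMESPACE::AllocationCallbacks allocator;
  //   allocator.setPUserData( &myTracker )
  //            .setPfnAllocation( &myAlloc )
  //            .setPfnReallocation( &myRealloc )
  //            .setPfnFree( &myFree );
  //   VULKAN_HPP_NAMESPACE::Instance instance = VULKAN_HPP_NAMESPACE::createInstance( instanceCreateInfo, allocator );
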
struct ComponentMapping
|
|
{
|
|
using NativeType = VkComponentMapping;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ComponentMapping(VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity) VULKAN_HPP_NOEXCEPT
|
|
: r( r_ ), g( g_ ), b( b_ ), a( a_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ComponentMapping( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ComponentMapping( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ComponentMapping( *reinterpret_cast<ComponentMapping const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ComponentMapping & operator=( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ComponentMapping & operator=( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComponentMapping const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setR( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
r = r_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setG( VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
g = g_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setB( VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
b = b_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setA( VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
a = a_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkComponentMapping const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkComponentMapping*>( this );
|
|
}
|
|
|
|
explicit operator VkComponentMapping &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkComponentMapping*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::ComponentSwizzle const &, VULKAN_HPP_NAMESPACE::ComponentSwizzle const &, VULKAN_HPP_NAMESPACE::ComponentSwizzle const &, VULKAN_HPP_NAMESPACE::ComponentSwizzle const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( r, g, b, a );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ComponentMapping const & ) const = default;
|
|
#else
|
|
bool operator==( ComponentMapping const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( r == rhs.r )
|
|
&& ( g == rhs.g )
|
|
&& ( b == rhs.b )
|
|
&& ( a == rhs.a );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ComponentMapping const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::ComponentSwizzle r = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
|
|
VULKAN_HPP_NAMESPACE::ComponentSwizzle g = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
|
|
VULKAN_HPP_NAMESPACE::ComponentSwizzle b = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
|
|
VULKAN_HPP_NAMESPACE::ComponentSwizzle a = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ComponentMapping ) == sizeof( VkComponentMapping ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ComponentMapping>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ComponentMapping>::value, "ComponentMapping is not nothrow_move_constructible!" );
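
  // Illustrative usage sketch, not generated from the registry: a component mapping that broadcasts
  // the red channel of a single-channel image (e.g. eR8Unorm) to RGB and forces alpha to one, so the
  // image samples like an opaque grayscale texture. 'imageViewCreateInfo' is an assumed, otherwise
  // initialized ImageViewCreateInfo.
  //
  //   VULKAN_HPP_NAMESPACE::ComponentMapping grayscaleMapping( VULKAN_HPP_NAMESPACE::ComponentSwizzle::eR,
  //                                                            VULKAN_HPP_NAMESPACE::ComponentSwizzle::eR,
  //                                                            VULKAN_HPP_NAMESPACE::ComponentSwizzle::eR,
  //                                                            VULKAN_HPP_NAMESPACE::ComponentSwizzle::eOne );
  //   imageViewCreateInfo.setComponents( grayscaleMapping );
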
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
|
|
struct AndroidHardwareBufferFormatProperties2ANDROID
|
|
{
|
|
using NativeType = VkAndroidHardwareBufferFormatProperties2ANDROID;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatProperties2ANDROID;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatProperties2ANDROID(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint64_t externalFormat_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 formatFeatures_ = {}, VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven) VULKAN_HPP_NOEXCEPT
|
|
: format( format_ ), externalFormat( externalFormat_ ), formatFeatures( formatFeatures_ ), samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ ), suggestedYcbcrModel( suggestedYcbcrModel_ ), suggestedYcbcrRange( suggestedYcbcrRange_ ), suggestedXChromaOffset( suggestedXChromaOffset_ ), suggestedYChromaOffset( suggestedYChromaOffset_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatProperties2ANDROID( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AndroidHardwareBufferFormatProperties2ANDROID( VkAndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: AndroidHardwareBufferFormatProperties2ANDROID( *reinterpret_cast<AndroidHardwareBufferFormatProperties2ANDROID const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AndroidHardwareBufferFormatProperties2ANDROID & operator=( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AndroidHardwareBufferFormatProperties2ANDROID & operator=( VkAndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkAndroidHardwareBufferFormatProperties2ANDROID const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAndroidHardwareBufferFormatProperties2ANDROID*>( this );
|
|
}
|
|
|
|
explicit operator VkAndroidHardwareBufferFormatProperties2ANDROID &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAndroidHardwareBufferFormatProperties2ANDROID*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Format const &, uint64_t const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 const &, VULKAN_HPP_NAMESPACE::ComponentMapping const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, format, externalFormat, formatFeatures, samplerYcbcrConversionComponents, suggestedYcbcrModel, suggestedYcbcrRange, suggestedXChromaOffset, suggestedYChromaOffset );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( AndroidHardwareBufferFormatProperties2ANDROID const & ) const = default;
|
|
#else
|
|
bool operator==( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( format == rhs.format )
|
|
&& ( externalFormat == rhs.externalFormat )
|
|
&& ( formatFeatures == rhs.formatFeatures )
|
|
&& ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents )
|
|
&& ( suggestedYcbcrModel == rhs.suggestedYcbcrModel )
|
|
&& ( suggestedYcbcrRange == rhs.suggestedYcbcrRange )
|
|
&& ( suggestedXChromaOffset == rhs.suggestedXChromaOffset )
|
|
&& ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatProperties2ANDROID;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
uint64_t externalFormat = {};
|
|
VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 formatFeatures = {};
|
|
VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {};
|
|
VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
|
|
VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
|
|
VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
|
|
VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID ) == sizeof( VkAndroidHardwareBufferFormatProperties2ANDROID ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID>::value, "AndroidHardwareBufferFormatProperties2ANDROID is not nothrow_move_constructible!" );
|
|
|
|
  template <>
  struct CppType<StructureType, StructureType::eAndroidHardwareBufferFormatProperties2ANDROID>
  {
    using Type = AndroidHardwareBufferFormatProperties2ANDROID;
  };
|
|
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
|
|
struct AndroidHardwareBufferFormatPropertiesANDROID
|
|
{
|
|
using NativeType = VkAndroidHardwareBufferFormatPropertiesANDROID;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint64_t externalFormat_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {}, VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven) VULKAN_HPP_NOEXCEPT
|
|
: format( format_ ), externalFormat( externalFormat_ ), formatFeatures( formatFeatures_ ), samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ ), suggestedYcbcrModel( suggestedYcbcrModel_ ), suggestedYcbcrRange( suggestedYcbcrRange_ ), suggestedXChromaOffset( suggestedXChromaOffset_ ), suggestedYChromaOffset( suggestedYChromaOffset_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AndroidHardwareBufferFormatPropertiesANDROID( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: AndroidHardwareBufferFormatPropertiesANDROID( *reinterpret_cast<AndroidHardwareBufferFormatPropertiesANDROID const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AndroidHardwareBufferFormatPropertiesANDROID & operator=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AndroidHardwareBufferFormatPropertiesANDROID & operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkAndroidHardwareBufferFormatPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAndroidHardwareBufferFormatPropertiesANDROID*>( this );
|
|
}
|
|
|
|
explicit operator VkAndroidHardwareBufferFormatPropertiesANDROID &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAndroidHardwareBufferFormatPropertiesANDROID*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Format const &, uint64_t const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &, VULKAN_HPP_NAMESPACE::ComponentMapping const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, format, externalFormat, formatFeatures, samplerYcbcrConversionComponents, suggestedYcbcrModel, suggestedYcbcrRange, suggestedXChromaOffset, suggestedYChromaOffset );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( AndroidHardwareBufferFormatPropertiesANDROID const & ) const = default;
|
|
#else
|
|
bool operator==( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( format == rhs.format )
|
|
&& ( externalFormat == rhs.externalFormat )
|
|
&& ( formatFeatures == rhs.formatFeatures )
|
|
&& ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents )
|
|
&& ( suggestedYcbcrModel == rhs.suggestedYcbcrModel )
|
|
&& ( suggestedYcbcrRange == rhs.suggestedYcbcrRange )
|
|
&& ( suggestedXChromaOffset == rhs.suggestedXChromaOffset )
|
|
&& ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;
void * pNext = {};
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
uint64_t externalFormat = {};
VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {};
VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {};
VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
};

VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID>::value, "AndroidHardwareBufferFormatPropertiesANDROID is not nothrow_move_constructible!" );

template <>
struct CppType<StructureType, StructureType::eAndroidHardwareBufferFormatPropertiesANDROID>
{
using Type = AndroidHardwareBufferFormatPropertiesANDROID;
};
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
|
|
struct AndroidHardwareBufferPropertiesANDROID
|
|
{
|
|
using NativeType = VkAndroidHardwareBufferPropertiesANDROID;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferPropertiesANDROID;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID(VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, uint32_t memoryTypeBits_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: allocationSize( allocationSize_ ), memoryTypeBits( memoryTypeBits_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AndroidHardwareBufferPropertiesANDROID( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: AndroidHardwareBufferPropertiesANDROID( *reinterpret_cast<AndroidHardwareBufferPropertiesANDROID const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AndroidHardwareBufferPropertiesANDROID & operator=( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AndroidHardwareBufferPropertiesANDROID & operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkAndroidHardwareBufferPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAndroidHardwareBufferPropertiesANDROID*>( this );
|
|
}
|
|
|
|
explicit operator VkAndroidHardwareBufferPropertiesANDROID &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, allocationSize, memoryTypeBits );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( AndroidHardwareBufferPropertiesANDROID const & ) const = default;
|
|
#else
|
|
bool operator==( AndroidHardwareBufferPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( allocationSize == rhs.allocationSize )
|
|
&& ( memoryTypeBits == rhs.memoryTypeBits );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( AndroidHardwareBufferPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferPropertiesANDROID;
void * pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {};
uint32_t memoryTypeBits = {};
};

VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferPropertiesANDROID ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::value, "AndroidHardwareBufferPropertiesANDROID is not nothrow_move_constructible!" );

template <>
struct CppType<StructureType, StructureType::eAndroidHardwareBufferPropertiesANDROID>
{
using Type = AndroidHardwareBufferPropertiesANDROID;
};
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
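// Illustrative usage sketch (not generated content): querying the Vulkan properties of an Android
// hardware buffer through the enhanced-mode wrapper. Assumes an existing vk::Device `device`, an
// AHardwareBuffer* `hardwareBuffer` acquired via the NDK, the default `vk` namespace alias, and the
// default exception-enabled configuration, under which this overload returns the filled structure.
//
//   vk::AndroidHardwareBufferPropertiesANDROID properties =
//     device.getAndroidHardwareBufferPropertiesANDROID( *hardwareBuffer );
//   uint32_t memoryTypeBits = properties.memoryTypeBits;   // feeds memory-type selection for vkAllocateMemory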
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
|
|
struct AndroidHardwareBufferUsageANDROID
|
|
{
|
|
using NativeType = VkAndroidHardwareBufferUsageANDROID;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferUsageANDROID;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID(uint64_t androidHardwareBufferUsage_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: androidHardwareBufferUsage( androidHardwareBufferUsage_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AndroidHardwareBufferUsageANDROID( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: AndroidHardwareBufferUsageANDROID( *reinterpret_cast<AndroidHardwareBufferUsageANDROID const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AndroidHardwareBufferUsageANDROID & operator=( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AndroidHardwareBufferUsageANDROID & operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkAndroidHardwareBufferUsageANDROID const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAndroidHardwareBufferUsageANDROID*>( this );
|
|
}
|
|
|
|
explicit operator VkAndroidHardwareBufferUsageANDROID &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAndroidHardwareBufferUsageANDROID*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint64_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, androidHardwareBufferUsage );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( AndroidHardwareBufferUsageANDROID const & ) const = default;
|
|
#else
|
|
bool operator==( AndroidHardwareBufferUsageANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( androidHardwareBufferUsage == rhs.androidHardwareBufferUsage );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( AndroidHardwareBufferUsageANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferUsageANDROID;
void * pNext = {};
uint64_t androidHardwareBufferUsage = {};
};

VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID ) == sizeof( VkAndroidHardwareBufferUsageANDROID ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID>::value, "AndroidHardwareBufferUsageANDROID is not nothrow_move_constructible!" );

template <>
struct CppType<StructureType, StructureType::eAndroidHardwareBufferUsageANDROID>
{
using Type = AndroidHardwareBufferUsageANDROID;
};
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
|
|
struct AndroidSurfaceCreateInfoKHR
|
|
{
|
|
using NativeType = VkAndroidSurfaceCreateInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidSurfaceCreateInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ = {}, struct ANativeWindow * window_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), window( window_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: AndroidSurfaceCreateInfoKHR( *reinterpret_cast<AndroidSurfaceCreateInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AndroidSurfaceCreateInfoKHR & operator=( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AndroidSurfaceCreateInfoKHR & operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & setWindow( struct ANativeWindow * window_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
window = window_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkAndroidSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkAndroidSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAndroidSurfaceCreateInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR const &, struct ANativeWindow * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, window );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( AndroidSurfaceCreateInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( AndroidSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( window == rhs.window );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( AndroidSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidSurfaceCreateInfoKHR;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags = {};
struct ANativeWindow * window = {};
};

VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR>::value, "AndroidSurfaceCreateInfoKHR is not nothrow_move_constructible!" );

template <>
struct CppType<StructureType, StructureType::eAndroidSurfaceCreateInfoKHR>
{
using Type = AndroidSurfaceCreateInfoKHR;
};
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
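// Illustrative usage sketch (not generated content): creating an Android surface from this struct.
// Assumes an existing vk::Instance `instance`, an ANativeWindow* `nativeWindow` obtained from the
// NDK, the default `vk` namespace alias, and the exception-enabled configuration.
//
//   vk::AndroidSurfaceCreateInfoKHR surfaceCreateInfo( {}, nativeWindow );
//   vk::SurfaceKHR surface = instance.createAndroidSurfaceKHR( surfaceCreateInfo );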
struct ApplicationInfo
|
|
{
|
|
using NativeType = VkApplicationInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eApplicationInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ApplicationInfo(const char * pApplicationName_ = {}, uint32_t applicationVersion_ = {}, const char * pEngineName_ = {}, uint32_t engineVersion_ = {}, uint32_t apiVersion_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: pApplicationName( pApplicationName_ ), applicationVersion( applicationVersion_ ), pEngineName( pEngineName_ ), engineVersion( engineVersion_ ), apiVersion( apiVersion_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ApplicationInfo( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ApplicationInfo( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ApplicationInfo( *reinterpret_cast<ApplicationInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ApplicationInfo & operator=( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ApplicationInfo & operator=( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ApplicationInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPApplicationName( const char * pApplicationName_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pApplicationName = pApplicationName_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setApplicationVersion( uint32_t applicationVersion_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
applicationVersion = applicationVersion_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPEngineName( const char * pEngineName_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pEngineName = pEngineName_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setEngineVersion( uint32_t engineVersion_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
engineVersion = engineVersion_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setApiVersion( uint32_t apiVersion_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
apiVersion = apiVersion_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkApplicationInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkApplicationInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkApplicationInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkApplicationInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const char * const &, uint32_t const &, const char * const &, uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pApplicationName, applicationVersion, pEngineName, engineVersion, apiVersion );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
std::strong_ordering operator<=>( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
if ( pApplicationName != rhs.pApplicationName )
if ( auto cmp = strcmp( pApplicationName, rhs.pApplicationName ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = applicationVersion <=> rhs.applicationVersion; cmp != 0 ) return cmp;
if ( pEngineName != rhs.pEngineName )
if ( auto cmp = strcmp( pEngineName, rhs.pEngineName ); cmp != 0 )
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
if ( auto cmp = engineVersion <=> rhs.engineVersion; cmp != 0 ) return cmp;
if ( auto cmp = apiVersion <=> rhs.apiVersion; cmp != 0 ) return cmp;

return std::strong_ordering::equivalent;
}
#endif

bool operator==( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( ( pApplicationName == rhs.pApplicationName ) || ( strcmp( pApplicationName, rhs.pApplicationName ) == 0 ) )
&& ( applicationVersion == rhs.applicationVersion )
&& ( ( pEngineName == rhs.pEngineName ) || ( strcmp( pEngineName, rhs.pEngineName ) == 0 ) )
&& ( engineVersion == rhs.engineVersion )
&& ( apiVersion == rhs.apiVersion );
}

bool operator!=( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}

public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eApplicationInfo;
const void * pNext = {};
const char * pApplicationName = {};
uint32_t applicationVersion = {};
const char * pEngineName = {};
uint32_t engineVersion = {};
uint32_t apiVersion = {};
};

VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ApplicationInfo ) == sizeof( VkApplicationInfo ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ApplicationInfo>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ApplicationInfo>::value, "ApplicationInfo is not nothrow_move_constructible!" );

template <>
struct CppType<StructureType, StructureType::eApplicationInfo>
{
using Type = ApplicationInfo;
};
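// Illustrative usage sketch (not generated content): filling ApplicationInfo for instance creation.
// Assumes the default `vk` namespace alias and the exception-enabled configuration; the application
// and engine names and version numbers are placeholders.
//
//   vk::ApplicationInfo applicationInfo( "MyApp", 1, "MyEngine", 1, VK_API_VERSION_1_2 );
//   vk::InstanceCreateInfo instanceCreateInfo( {}, &applicationInfo );
//   vk::Instance instance = vk::createInstance( instanceCreateInfo );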
struct AttachmentDescription
|
|
{
|
|
using NativeType = VkAttachmentDescription;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR AttachmentDescription(VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), format( format_ ), samples( samples_ ), loadOp( loadOp_ ), storeOp( storeOp_ ), stencilLoadOp( stencilLoadOp_ ), stencilStoreOp( stencilStoreOp_ ), initialLayout( initialLayout_ ), finalLayout( finalLayout_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR AttachmentDescription( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AttachmentDescription( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: AttachmentDescription( *reinterpret_cast<AttachmentDescription const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AttachmentDescription & operator=( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AttachmentDescription & operator=( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescription const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
format = format_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
samples = samples_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
loadOp = loadOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
storeOp = storeOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stencilLoadOp = stencilLoadOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stencilStoreOp = stencilStoreOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
initialLayout = initialLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
finalLayout = finalLayout_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkAttachmentDescription const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAttachmentDescription*>( this );
|
|
}
|
|
|
|
explicit operator VkAttachmentDescription &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAttachmentDescription*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &, VULKAN_HPP_NAMESPACE::AttachmentLoadOp const &, VULKAN_HPP_NAMESPACE::AttachmentStoreOp const &, VULKAN_HPP_NAMESPACE::AttachmentLoadOp const &, VULKAN_HPP_NAMESPACE::AttachmentStoreOp const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( flags, format, samples, loadOp, storeOp, stencilLoadOp, stencilStoreOp, initialLayout, finalLayout );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( AttachmentDescription const & ) const = default;
|
|
#else
|
|
bool operator==( AttachmentDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( flags == rhs.flags )
|
|
&& ( format == rhs.format )
|
|
&& ( samples == rhs.samples )
|
|
&& ( loadOp == rhs.loadOp )
|
|
&& ( storeOp == rhs.storeOp )
|
|
&& ( stencilLoadOp == rhs.stencilLoadOp )
|
|
&& ( stencilStoreOp == rhs.stencilStoreOp )
|
|
&& ( initialLayout == rhs.initialLayout )
|
|
&& ( finalLayout == rhs.finalLayout );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( AttachmentDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {};
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};

VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescription ) == sizeof( VkAttachmentDescription ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentDescription>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentDescription>::value, "AttachmentDescription is not nothrow_move_constructible!" );
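// Illustrative usage sketch (not generated content): a typical swapchain color attachment built with
// the chained setters above. The format and final layout are placeholders for a presentable image;
// the default `vk` namespace alias is assumed.
//
//   vk::AttachmentDescription colorAttachment = vk::AttachmentDescription()
//                                                 .setFormat( vk::Format::eB8G8R8A8Unorm )
//                                                 .setSamples( vk::SampleCountFlagBits::e1 )
//                                                 .setLoadOp( vk::AttachmentLoadOp::eClear )
//                                                 .setStoreOp( vk::AttachmentStoreOp::eStore )
//                                                 .setStencilLoadOp( vk::AttachmentLoadOp::eDontCare )
//                                                 .setStencilStoreOp( vk::AttachmentStoreOp::eDontCare )
//                                                 .setInitialLayout( vk::ImageLayout::eUndefined )
//                                                 .setFinalLayout( vk::ImageLayout::ePresentSrcKHR );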
struct AttachmentDescription2
|
|
{
|
|
using NativeType = VkAttachmentDescription2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescription2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR AttachmentDescription2(VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), format( format_ ), samples( samples_ ), loadOp( loadOp_ ), storeOp( storeOp_ ), stencilLoadOp( stencilLoadOp_ ), stencilStoreOp( stencilStoreOp_ ), initialLayout( initialLayout_ ), finalLayout( finalLayout_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR AttachmentDescription2( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AttachmentDescription2( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: AttachmentDescription2( *reinterpret_cast<AttachmentDescription2 const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AttachmentDescription2 & operator=( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AttachmentDescription2 & operator=( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescription2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
format = format_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
samples = samples_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
loadOp = loadOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
storeOp = storeOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stencilLoadOp = stencilLoadOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stencilStoreOp = stencilStoreOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
initialLayout = initialLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
finalLayout = finalLayout_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkAttachmentDescription2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAttachmentDescription2*>( this );
|
|
}
|
|
|
|
explicit operator VkAttachmentDescription2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAttachmentDescription2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &, VULKAN_HPP_NAMESPACE::AttachmentLoadOp const &, VULKAN_HPP_NAMESPACE::AttachmentStoreOp const &, VULKAN_HPP_NAMESPACE::AttachmentLoadOp const &, VULKAN_HPP_NAMESPACE::AttachmentStoreOp const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, format, samples, loadOp, storeOp, stencilLoadOp, stencilStoreOp, initialLayout, finalLayout );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( AttachmentDescription2 const & ) const = default;
|
|
#else
|
|
bool operator==( AttachmentDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( format == rhs.format )
|
|
&& ( samples == rhs.samples )
|
|
&& ( loadOp == rhs.loadOp )
|
|
&& ( storeOp == rhs.storeOp )
|
|
&& ( stencilLoadOp == rhs.stencilLoadOp )
|
|
&& ( stencilStoreOp == rhs.stencilStoreOp )
|
|
&& ( initialLayout == rhs.initialLayout )
|
|
&& ( finalLayout == rhs.finalLayout );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( AttachmentDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescription2;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {};
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};

VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescription2 ) == sizeof( VkAttachmentDescription2 ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentDescription2>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentDescription2>::value, "AttachmentDescription2 is not nothrow_move_constructible!" );

template <>
struct CppType<StructureType, StructureType::eAttachmentDescription2>
{
using Type = AttachmentDescription2;
};
using AttachmentDescription2KHR = AttachmentDescription2;
struct AttachmentDescriptionStencilLayout
|
|
{
|
|
using NativeType = VkAttachmentDescriptionStencilLayout;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescriptionStencilLayout;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout(VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
|
|
: stencilInitialLayout( stencilInitialLayout_ ), stencilFinalLayout( stencilFinalLayout_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AttachmentDescriptionStencilLayout( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: AttachmentDescriptionStencilLayout( *reinterpret_cast<AttachmentDescriptionStencilLayout const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AttachmentDescriptionStencilLayout & operator=( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AttachmentDescriptionStencilLayout & operator=( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & setStencilInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stencilInitialLayout = stencilInitialLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & setStencilFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stencilFinalLayout = stencilFinalLayout_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkAttachmentDescriptionStencilLayout const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAttachmentDescriptionStencilLayout*>( this );
|
|
}
|
|
|
|
explicit operator VkAttachmentDescriptionStencilLayout &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAttachmentDescriptionStencilLayout*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, stencilInitialLayout, stencilFinalLayout );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( AttachmentDescriptionStencilLayout const & ) const = default;
|
|
#else
|
|
bool operator==( AttachmentDescriptionStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( stencilInitialLayout == rhs.stencilInitialLayout )
|
|
&& ( stencilFinalLayout == rhs.stencilFinalLayout );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( AttachmentDescriptionStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescriptionStencilLayout;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};

VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout ) == sizeof( VkAttachmentDescriptionStencilLayout ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout>::value, "AttachmentDescriptionStencilLayout is not nothrow_move_constructible!" );

template <>
struct CppType<StructureType, StructureType::eAttachmentDescriptionStencilLayout>
{
using Type = AttachmentDescriptionStencilLayout;
};
using AttachmentDescriptionStencilLayoutKHR = AttachmentDescriptionStencilLayout;
struct AttachmentReference
|
|
{
|
|
using NativeType = VkAttachmentReference;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR AttachmentReference(uint32_t attachment_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
|
|
: attachment( attachment_ ), layout( layout_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR AttachmentReference( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AttachmentReference( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: AttachmentReference( *reinterpret_cast<AttachmentReference const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AttachmentReference & operator=( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AttachmentReference & operator=( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentReference & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachment = attachment_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentReference & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
layout = layout_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkAttachmentReference const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAttachmentReference*>( this );
|
|
}
|
|
|
|
explicit operator VkAttachmentReference &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAttachmentReference*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( attachment, layout );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( AttachmentReference const & ) const = default;
|
|
#else
|
|
bool operator==( AttachmentReference const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( attachment == rhs.attachment )
|
|
&& ( layout == rhs.layout );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( AttachmentReference const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
uint32_t attachment = {};
VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};

VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentReference ) == sizeof( VkAttachmentReference ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentReference>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentReference>::value, "AttachmentReference is not nothrow_move_constructible!" );
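// Illustrative usage sketch (not generated content): referencing attachment 0 of a render pass in
// color-attachment-optimal layout. The index is into the pAttachments array of the enclosing
// render pass create info; such a reference is typically handed to vk::SubpassDescription through
// its pColorAttachments / setColorAttachments members. Default `vk` namespace alias assumed.
//
//   vk::AttachmentReference colorReference( 0, vk::ImageLayout::eColorAttachmentOptimal );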
struct AttachmentReference2
|
|
{
|
|
using NativeType = VkAttachmentReference2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReference2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR AttachmentReference2(uint32_t attachment_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: attachment( attachment_ ), layout( layout_ ), aspectMask( aspectMask_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR AttachmentReference2( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AttachmentReference2( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: AttachmentReference2( *reinterpret_cast<AttachmentReference2 const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AttachmentReference2 & operator=( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AttachmentReference2 & operator=( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachment = attachment_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
layout = layout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
aspectMask = aspectMask_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkAttachmentReference2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAttachmentReference2*>( this );
|
|
}
|
|
|
|
explicit operator VkAttachmentReference2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAttachmentReference2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ImageAspectFlags const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, attachment, layout, aspectMask );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( AttachmentReference2 const & ) const = default;
|
|
#else
|
|
bool operator==( AttachmentReference2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( attachment == rhs.attachment )
|
|
&& ( layout == rhs.layout )
|
|
&& ( aspectMask == rhs.aspectMask );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( AttachmentReference2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReference2;
const void * pNext = {};
uint32_t attachment = {};
VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
};

VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentReference2 ) == sizeof( VkAttachmentReference2 ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentReference2>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentReference2>::value, "AttachmentReference2 is not nothrow_move_constructible!" );

template <>
struct CppType<StructureType, StructureType::eAttachmentReference2>
{
using Type = AttachmentReference2;
};
using AttachmentReference2KHR = AttachmentReference2;
struct AttachmentReferenceStencilLayout
|
|
{
|
|
using NativeType = VkAttachmentReferenceStencilLayout;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReferenceStencilLayout;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout(VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
|
|
: stencilLayout( stencilLayout_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AttachmentReferenceStencilLayout( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: AttachmentReferenceStencilLayout( *reinterpret_cast<AttachmentReferenceStencilLayout const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AttachmentReferenceStencilLayout & operator=( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AttachmentReferenceStencilLayout & operator=( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout & setStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stencilLayout = stencilLayout_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkAttachmentReferenceStencilLayout const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAttachmentReferenceStencilLayout*>( this );
|
|
}
|
|
|
|
explicit operator VkAttachmentReferenceStencilLayout &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAttachmentReferenceStencilLayout*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, stencilLayout );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( AttachmentReferenceStencilLayout const & ) const = default;
|
|
#else
|
|
bool operator==( AttachmentReferenceStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( stencilLayout == rhs.stencilLayout );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( AttachmentReferenceStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReferenceStencilLayout;
void * pNext = {};
VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};

VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout ) == sizeof( VkAttachmentReferenceStencilLayout ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout>::value, "AttachmentReferenceStencilLayout is not nothrow_move_constructible!" );

template <>
struct CppType<StructureType, StructureType::eAttachmentReferenceStencilLayout>
{
using Type = AttachmentReferenceStencilLayout;
};
using AttachmentReferenceStencilLayoutKHR = AttachmentReferenceStencilLayout;
struct AttachmentSampleCountInfoAMD
|
|
{
|
|
using NativeType = VkAttachmentSampleCountInfoAMD;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentSampleCountInfoAMD;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR AttachmentSampleCountInfoAMD(uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1) VULKAN_HPP_NOEXCEPT
|
|
: colorAttachmentCount( colorAttachmentCount_ ), pColorAttachmentSamples( pColorAttachmentSamples_ ), depthStencilAttachmentSamples( depthStencilAttachmentSamples_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR AttachmentSampleCountInfoAMD( AttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AttachmentSampleCountInfoAMD( VkAttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: AttachmentSampleCountInfoAMD( *reinterpret_cast<AttachmentSampleCountInfoAMD const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
AttachmentSampleCountInfoAMD( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleCountFlagBits> const & colorAttachmentSamples_, VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1 )
|
|
: colorAttachmentCount( static_cast<uint32_t>( colorAttachmentSamples_.size() ) ), pColorAttachmentSamples( colorAttachmentSamples_.data() ), depthStencilAttachmentSamples( depthStencilAttachmentSamples_ )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AttachmentSampleCountInfoAMD & operator=( AttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AttachmentSampleCountInfoAMD & operator=( VkAttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorAttachmentCount = colorAttachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setPColorAttachmentSamples( const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pColorAttachmentSamples = pColorAttachmentSamples_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
AttachmentSampleCountInfoAMD & setColorAttachmentSamples( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleCountFlagBits> const & colorAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorAttachmentCount = static_cast<uint32_t>( colorAttachmentSamples_.size() );
|
|
pColorAttachmentSamples = colorAttachmentSamples_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setDepthStencilAttachmentSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
depthStencilAttachmentSamples = depthStencilAttachmentSamples_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkAttachmentSampleCountInfoAMD const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAttachmentSampleCountInfoAMD*>( this );
|
|
}
|
|
|
|
explicit operator VkAttachmentSampleCountInfoAMD &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAttachmentSampleCountInfoAMD*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, colorAttachmentCount, pColorAttachmentSamples, depthStencilAttachmentSamples );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( AttachmentSampleCountInfoAMD const & ) const = default;
|
|
#else
|
|
bool operator==( AttachmentSampleCountInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( colorAttachmentCount == rhs.colorAttachmentCount )
|
|
&& ( pColorAttachmentSamples == rhs.pColorAttachmentSamples )
|
|
&& ( depthStencilAttachmentSamples == rhs.depthStencilAttachmentSamples );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( AttachmentSampleCountInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentSampleCountInfoAMD;
|
|
const void * pNext = {};
|
|
uint32_t colorAttachmentCount = {};
|
|
const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD ) == sizeof( VkAttachmentSampleCountInfoAMD ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD>::value, "AttachmentSampleCountInfoAMD is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eAttachmentSampleCountInfoAMD>
|
|
{
|
|
using Type = AttachmentSampleCountInfoAMD;
|
|
};
|
|
using AttachmentSampleCountInfoNV = AttachmentSampleCountInfoAMD;
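
  // Usage sketch (illustrative comment only, not generated from the registry): the
  // ArrayProxyNoTemporaries constructor above fills colorAttachmentCount and
  // pColorAttachmentSamples from a single container. Assumes the default namespace
  // alias `vk` and that VULKAN_HPP_DISABLE_ENHANCED_MODE is not defined.
  //
  //   std::array<vk::SampleCountFlagBits, 2> colorSamples = { vk::SampleCountFlagBits::e4,
  //                                                           vk::SampleCountFlagBits::e4 };
  //   vk::AttachmentSampleCountInfoAMD sampleCountInfo( colorSamples, vk::SampleCountFlagBits::e4 );
  //   // equivalent to colorAttachmentCount = 2, pColorAttachmentSamples = colorSamples.data()
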
struct Extent2D
|
|
{
|
|
using NativeType = VkExtent2D;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR Extent2D(uint32_t width_ = {}, uint32_t height_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: width( width_ ), height( height_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR Extent2D( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
Extent2D( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: Extent2D( *reinterpret_cast<Extent2D const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
Extent2D & operator=( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
Extent2D & operator=( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent2D const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 Extent2D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
width = width_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Extent2D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
height = height_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkExtent2D const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExtent2D*>( this );
|
|
}
|
|
|
|
explicit operator VkExtent2D &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExtent2D*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( width, height );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( Extent2D const & ) const = default;
|
|
#else
|
|
bool operator==( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( width == rhs.width )
|
|
&& ( height == rhs.height );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t width = {};
|
|
uint32_t height = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Extent2D ) == sizeof( VkExtent2D ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Extent2D>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Extent2D>::value, "Extent2D is not nothrow_move_constructible!" );
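
  // Usage sketch (illustrative comment only): Extent2D is a plain width/height pair; the
  // setters return *this, so calls can be chained. Assumes the default namespace alias `vk`.
  //
  //   vk::Extent2D extent( 1920, 1080 );
  //   extent.setWidth( 1280 ).setHeight( 720 );
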
struct SampleLocationEXT
|
|
{
|
|
using NativeType = VkSampleLocationEXT;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SampleLocationEXT(float x_ = {}, float y_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: x( x_ ), y( y_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SampleLocationEXT( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SampleLocationEXT( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SampleLocationEXT( *reinterpret_cast<SampleLocationEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SampleLocationEXT & operator=( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SampleLocationEXT & operator=( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SampleLocationEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
x = x_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SampleLocationEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
y = y_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSampleLocationEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSampleLocationEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkSampleLocationEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSampleLocationEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<float const &, float const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( x, y );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SampleLocationEXT const & ) const = default;
|
|
#else
|
|
bool operator==( SampleLocationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( x == rhs.x )
|
|
&& ( y == rhs.y );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SampleLocationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
float x = {};
|
|
float y = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SampleLocationEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SampleLocationEXT>::value, "SampleLocationEXT is not nothrow_move_constructible!" );
|
|
|
|
struct SampleLocationsInfoEXT
|
|
{
|
|
using NativeType = VkSampleLocationsInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSampleLocationsInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ = {}, uint32_t sampleLocationsCount_ = {}, const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: sampleLocationsPerPixel( sampleLocationsPerPixel_ ), sampleLocationGridSize( sampleLocationGridSize_ ), sampleLocationsCount( sampleLocationsCount_ ), pSampleLocations( pSampleLocations_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SampleLocationsInfoEXT( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SampleLocationsInfoEXT( *reinterpret_cast<SampleLocationsInfoEXT const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SampleLocationsInfoEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_, VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleLocationEXT> const & sampleLocations_ )
|
|
: sampleLocationsPerPixel( sampleLocationsPerPixel_ ), sampleLocationGridSize( sampleLocationGridSize_ ), sampleLocationsCount( static_cast<uint32_t>( sampleLocations_.size() ) ), pSampleLocations( sampleLocations_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SampleLocationsInfoEXT & operator=( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SampleLocationsInfoEXT & operator=( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setSampleLocationsPerPixel( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sampleLocationsPerPixel = sampleLocationsPerPixel_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setSampleLocationGridSize( VULKAN_HPP_NAMESPACE::Extent2D const & sampleLocationGridSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sampleLocationGridSize = sampleLocationGridSize_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setSampleLocationsCount( uint32_t sampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sampleLocationsCount = sampleLocationsCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setPSampleLocations( const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSampleLocations = pSampleLocations_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SampleLocationsInfoEXT & setSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleLocationEXT> const & sampleLocations_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sampleLocationsCount = static_cast<uint32_t>( sampleLocations_.size() );
|
|
pSampleLocations = sampleLocations_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSampleLocationsInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSampleLocationsInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkSampleLocationsInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSampleLocationsInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SampleLocationEXT * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, sampleLocationsPerPixel, sampleLocationGridSize, sampleLocationsCount, pSampleLocations );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SampleLocationsInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( SampleLocationsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel )
|
|
&& ( sampleLocationGridSize == rhs.sampleLocationGridSize )
|
|
&& ( sampleLocationsCount == rhs.sampleLocationsCount )
|
|
&& ( pSampleLocations == rhs.pSampleLocations );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SampleLocationsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSampleLocationsInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
|
|
VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize = {};
|
|
uint32_t sampleLocationsCount = {};
|
|
const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT ) == sizeof( VkSampleLocationsInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT>::value, "SampleLocationsInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSampleLocationsInfoEXT>
|
|
{
|
|
using Type = SampleLocationsInfoEXT;
|
|
};
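
  // Usage sketch (illustrative comment only): with enhanced mode enabled, the ArrayProxy
  // constructor fills sampleLocationsCount and pSampleLocations from one container; the
  // container must outlive the struct, since only a pointer is stored. Assumes the default
  // namespace alias `vk`.
  //
  //   std::array<vk::SampleLocationEXT, 2> locations = { vk::SampleLocationEXT( 0.25f, 0.25f ),
  //                                                      vk::SampleLocationEXT( 0.75f, 0.75f ) };
  //   vk::SampleLocationsInfoEXT sampleLocationsInfo( vk::SampleCountFlagBits::e2,
  //                                                   vk::Extent2D( 1, 1 ),
  //                                                   locations );
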
struct AttachmentSampleLocationsEXT
|
|
{
|
|
using NativeType = VkAttachmentSampleLocationsEXT;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT(uint32_t attachmentIndex_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: attachmentIndex( attachmentIndex_ ), sampleLocationsInfo( sampleLocationsInfo_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AttachmentSampleLocationsEXT( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: AttachmentSampleLocationsEXT( *reinterpret_cast<AttachmentSampleLocationsEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
AttachmentSampleLocationsEXT & operator=( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
AttachmentSampleLocationsEXT & operator=( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT & setAttachmentIndex( uint32_t attachmentIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentIndex = attachmentIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sampleLocationsInfo = sampleLocationsInfo_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkAttachmentSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkAttachmentSampleLocationsEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkAttachmentSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkAttachmentSampleLocationsEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( attachmentIndex, sampleLocationsInfo );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( AttachmentSampleLocationsEXT const & ) const = default;
|
|
#else
|
|
bool operator==( AttachmentSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( attachmentIndex == rhs.attachmentIndex )
|
|
&& ( sampleLocationsInfo == rhs.sampleLocationsInfo );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( AttachmentSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t attachmentIndex = {};
|
|
VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT ) == sizeof( VkAttachmentSampleLocationsEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT>::value, "AttachmentSampleLocationsEXT is not nothrow_move_constructible!" );
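
  // Usage sketch (illustrative comment only): AttachmentSampleLocationsEXT simply pairs an
  // attachment index with a SampleLocationsInfoEXT, e.g. for use in
  // RenderPassSampleLocationsBeginInfoEXT. Assumes the default namespace alias `vk` and a
  // SampleLocationsInfoEXT named sampleLocationsInfo built as shown above.
  //
  //   vk::AttachmentSampleLocationsEXT attachmentSampleLocations( 0 /* attachmentIndex */,
  //                                                               sampleLocationsInfo );
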
struct BaseInStructure
|
|
{
|
|
using NativeType = VkBaseInStructure;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
BaseInStructure(VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo) VULKAN_HPP_NOEXCEPT
|
|
: sType( sType_ )
|
|
{}
|
|
|
|
BaseInStructure( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BaseInStructure( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BaseInStructure( *reinterpret_cast<BaseInStructure const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BaseInStructure & operator=( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BaseInStructure & operator=( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseInStructure const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BaseInStructure & setPNext( const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBaseInStructure const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBaseInStructure*>( this );
|
|
}
|
|
|
|
explicit operator VkBaseInStructure &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBaseInStructure*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const struct VULKAN_HPP_NAMESPACE::BaseInStructure * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BaseInStructure const & ) const = default;
|
|
#else
|
|
bool operator==( BaseInStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BaseInStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo;
|
|
const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BaseInStructure ) == sizeof( VkBaseInStructure ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BaseInStructure>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BaseInStructure>::value, "BaseInStructure is not nothrow_move_constructible!" );
|
|
|
|
struct BaseOutStructure
|
|
{
|
|
using NativeType = VkBaseOutStructure;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
BaseOutStructure(VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo) VULKAN_HPP_NOEXCEPT
|
|
: sType( sType_ )
|
|
{}
|
|
|
|
BaseOutStructure( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BaseOutStructure( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BaseOutStructure( *reinterpret_cast<BaseOutStructure const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BaseOutStructure & operator=( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BaseOutStructure & operator=( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseOutStructure const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BaseOutStructure & setPNext( struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBaseOutStructure const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBaseOutStructure*>( this );
|
|
}
|
|
|
|
explicit operator VkBaseOutStructure &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBaseOutStructure*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, struct VULKAN_HPP_NAMESPACE::BaseOutStructure * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BaseOutStructure const & ) const = default;
|
|
#else
|
|
bool operator==( BaseOutStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BaseOutStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo;
|
|
struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BaseOutStructure ) == sizeof( VkBaseOutStructure ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BaseOutStructure>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BaseOutStructure>::value, "BaseOutStructure is not nothrow_move_constructible!" );
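
  // Usage sketch (illustrative comment only): BaseInStructure / BaseOutStructure model the
  // common sType/pNext header, which makes it possible to walk an arbitrary structure chain.
  // Assumes the default namespace alias `vk`; `someStruct` is a placeholder for any extensible
  // output structure whose pNext is of type void *.
  //
  //   for ( vk::BaseOutStructure * node = reinterpret_cast<vk::BaseOutStructure *>( someStruct.pNext );
  //         node != nullptr;
  //         node = node->pNext )
  //   {
  //     // inspect node->sType here
  //   }
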
struct BindAccelerationStructureMemoryInfoNV
|
|
{
|
|
using NativeType = VkBindAccelerationStructureMemoryInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindAccelerationStructureMemoryInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, uint32_t deviceIndexCount_ = {}, const uint32_t * pDeviceIndices_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: accelerationStructure( accelerationStructure_ ), memory( memory_ ), memoryOffset( memoryOffset_ ), deviceIndexCount( deviceIndexCount_ ), pDeviceIndices( pDeviceIndices_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BindAccelerationStructureMemoryInfoNV( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BindAccelerationStructureMemoryInfoNV( *reinterpret_cast<BindAccelerationStructureMemoryInfoNV const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
BindAccelerationStructureMemoryInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_, VULKAN_HPP_NAMESPACE::DeviceMemory memory_, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ )
|
|
: accelerationStructure( accelerationStructure_ ), memory( memory_ ), memoryOffset( memoryOffset_ ), deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BindAccelerationStructureMemoryInfoNV & operator=( BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BindAccelerationStructureMemoryInfoNV & operator=( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
accelerationStructure = accelerationStructure_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memory = memory_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memoryOffset = memoryOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceIndexCount = deviceIndexCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDeviceIndices = pDeviceIndices_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
BindAccelerationStructureMemoryInfoNV & setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
|
|
pDeviceIndices = deviceIndices_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBindAccelerationStructureMemoryInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkBindAccelerationStructureMemoryInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBindAccelerationStructureMemoryInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureNV const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, const uint32_t * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, accelerationStructure, memory, memoryOffset, deviceIndexCount, pDeviceIndices );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BindAccelerationStructureMemoryInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( BindAccelerationStructureMemoryInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( accelerationStructure == rhs.accelerationStructure )
|
|
&& ( memory == rhs.memory )
|
|
&& ( memoryOffset == rhs.memoryOffset )
|
|
&& ( deviceIndexCount == rhs.deviceIndexCount )
|
|
&& ( pDeviceIndices == rhs.pDeviceIndices );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BindAccelerationStructureMemoryInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
|
|
uint32_t deviceIndexCount = {};
|
|
const uint32_t * pDeviceIndices = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV ) == sizeof( VkBindAccelerationStructureMemoryInfoNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV>::value, "BindAccelerationStructureMemoryInfoNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBindAccelerationStructureMemoryInfoNV>
|
|
{
|
|
using Type = BindAccelerationStructureMemoryInfoNV;
|
|
};
|
|
|
|
struct BindBufferMemoryDeviceGroupInfo
|
|
{
|
|
using NativeType = VkBindBufferMemoryDeviceGroupInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryDeviceGroupInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo(uint32_t deviceIndexCount_ = {}, const uint32_t * pDeviceIndices_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: deviceIndexCount( deviceIndexCount_ ), pDeviceIndices( pDeviceIndices_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BindBufferMemoryDeviceGroupInfo( *reinterpret_cast<BindBufferMemoryDeviceGroupInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
BindBufferMemoryDeviceGroupInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ )
|
|
: deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BindBufferMemoryDeviceGroupInfo & operator=( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BindBufferMemoryDeviceGroupInfo & operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceIndexCount = deviceIndexCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDeviceIndices = pDeviceIndices_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
BindBufferMemoryDeviceGroupInfo & setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
|
|
pDeviceIndices = deviceIndices_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBindBufferMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBindBufferMemoryDeviceGroupInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkBindBufferMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBindBufferMemoryDeviceGroupInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, deviceIndexCount, pDeviceIndices );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BindBufferMemoryDeviceGroupInfo const & ) const = default;
|
|
#else
|
|
bool operator==( BindBufferMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( deviceIndexCount == rhs.deviceIndexCount )
|
|
&& ( pDeviceIndices == rhs.pDeviceIndices );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BindBufferMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryDeviceGroupInfo;
|
|
const void * pNext = {};
|
|
uint32_t deviceIndexCount = {};
|
|
const uint32_t * pDeviceIndices = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo ) == sizeof( VkBindBufferMemoryDeviceGroupInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo>::value, "BindBufferMemoryDeviceGroupInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBindBufferMemoryDeviceGroupInfo>
|
|
{
|
|
using Type = BindBufferMemoryDeviceGroupInfo;
|
|
};
|
|
using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo;
|
|
|
|
struct BindBufferMemoryInfo
|
|
{
|
|
using NativeType = VkBindBufferMemoryInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: buffer( buffer_ ), memory( memory_ ), memoryOffset( memoryOffset_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BindBufferMemoryInfo( *reinterpret_cast<BindBufferMemoryInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BindBufferMemoryInfo & operator=( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BindBufferMemoryInfo & operator=( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
buffer = buffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memory = memory_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memoryOffset = memoryOffset_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBindBufferMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBindBufferMemoryInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkBindBufferMemoryInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBindBufferMemoryInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, buffer, memory, memoryOffset );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BindBufferMemoryInfo const & ) const = default;
|
|
#else
|
|
bool operator==( BindBufferMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( buffer == rhs.buffer )
|
|
&& ( memory == rhs.memory )
|
|
&& ( memoryOffset == rhs.memoryOffset );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BindBufferMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo>::value, "BindBufferMemoryInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBindBufferMemoryInfo>
|
|
{
|
|
using Type = BindBufferMemoryInfo;
|
|
};
|
|
using BindBufferMemoryInfoKHR = BindBufferMemoryInfo;
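
  // Usage sketch (illustrative comment only): BindBufferMemoryInfo describes one buffer/memory
  // binding; a BindBufferMemoryDeviceGroupInfo can be chained through pNext to restrict the
  // binding to specific device indices. Assumes the default namespace alias `vk`, enhanced mode,
  // and valid `buffer` and `memory` handles obtained elsewhere.
  //
  //   std::array<uint32_t, 2> deviceIndices = { 0, 1 };
  //   vk::BindBufferMemoryDeviceGroupInfo deviceGroupInfo( deviceIndices );
  //   vk::BindBufferMemoryInfo bindInfo( buffer, memory, 0 /* memoryOffset */ );
  //   bindInfo.setPNext( &deviceGroupInfo );
  //   // the resulting chain is then passed to vkBindBufferMemory2 / Device::bindBufferMemory2
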
struct Offset2D
|
|
{
|
|
using NativeType = VkOffset2D;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR Offset2D(int32_t x_ = {}, int32_t y_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: x( x_ ), y( y_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR Offset2D( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
Offset2D( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: Offset2D( *reinterpret_cast<Offset2D const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
Offset2D & operator=( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
Offset2D & operator=( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset2D const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 Offset2D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
x = x_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Offset2D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
y = y_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkOffset2D const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkOffset2D*>( this );
|
|
}
|
|
|
|
explicit operator VkOffset2D &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkOffset2D*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<int32_t const &, int32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( x, y );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( Offset2D const & ) const = default;
|
|
#else
|
|
bool operator==( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( x == rhs.x )
|
|
&& ( y == rhs.y );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
int32_t x = {};
|
|
int32_t y = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Offset2D ) == sizeof( VkOffset2D ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Offset2D>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Offset2D>::value, "Offset2D is not nothrow_move_constructible!" );
|
|
|
|
struct Rect2D
|
|
{
|
|
using NativeType = VkRect2D;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR Rect2D(VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: offset( offset_ ), extent( extent_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR Rect2D( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
Rect2D( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: Rect2D( *reinterpret_cast<Rect2D const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
Rect2D & operator=( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
Rect2D & operator=( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Rect2D const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 Rect2D & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
offset = offset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Rect2D & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
extent = extent_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkRect2D const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRect2D*>( this );
|
|
}
|
|
|
|
explicit operator VkRect2D &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRect2D*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( offset, extent );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( Rect2D const & ) const = default;
|
|
#else
|
|
bool operator==( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( offset == rhs.offset )
|
|
&& ( extent == rhs.extent );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::Offset2D offset = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D extent = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Rect2D ) == sizeof( VkRect2D ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Rect2D>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Rect2D>::value, "Rect2D is not nothrow_move_constructible!" );
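
  // Usage sketch (illustrative comment only): Rect2D combines an Offset2D origin with an
  // Extent2D size, e.g. for scissor rectangles or render areas. Assumes the default
  // namespace alias `vk`.
  //
  //   vk::Rect2D scissor( vk::Offset2D( 0, 0 ), vk::Extent2D( 1280, 720 ) );
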
struct BindImageMemoryDeviceGroupInfo
|
|
{
|
|
using NativeType = VkBindImageMemoryDeviceGroupInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryDeviceGroupInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo(uint32_t deviceIndexCount_ = {}, const uint32_t * pDeviceIndices_ = {}, uint32_t splitInstanceBindRegionCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: deviceIndexCount( deviceIndexCount_ ), pDeviceIndices( pDeviceIndices_ ), splitInstanceBindRegionCount( splitInstanceBindRegionCount_ ), pSplitInstanceBindRegions( pSplitInstanceBindRegions_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BindImageMemoryDeviceGroupInfo( *reinterpret_cast<BindImageMemoryDeviceGroupInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
BindImageMemoryDeviceGroupInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & splitInstanceBindRegions_ = {} )
|
|
: deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() ), splitInstanceBindRegionCount( static_cast<uint32_t>( splitInstanceBindRegions_.size() ) ), pSplitInstanceBindRegions( splitInstanceBindRegions_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BindImageMemoryDeviceGroupInfo & operator=( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BindImageMemoryDeviceGroupInfo & operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceIndexCount = deviceIndexCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDeviceIndices = pDeviceIndices_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
BindImageMemoryDeviceGroupInfo & setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
|
|
pDeviceIndices = deviceIndices_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
splitInstanceBindRegionCount = splitInstanceBindRegionCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setPSplitInstanceBindRegions( const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSplitInstanceBindRegions = pSplitInstanceBindRegions_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & splitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
splitInstanceBindRegionCount = static_cast<uint32_t>( splitInstanceBindRegions_.size() );
|
|
pSplitInstanceBindRegions = splitInstanceBindRegions_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBindImageMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBindImageMemoryDeviceGroupInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkBindImageMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBindImageMemoryDeviceGroupInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Rect2D * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, deviceIndexCount, pDeviceIndices, splitInstanceBindRegionCount, pSplitInstanceBindRegions );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BindImageMemoryDeviceGroupInfo const & ) const = default;
|
|
#else
|
|
bool operator==( BindImageMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( deviceIndexCount == rhs.deviceIndexCount )
|
|
&& ( pDeviceIndices == rhs.pDeviceIndices )
|
|
&& ( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount )
|
|
&& ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BindImageMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryDeviceGroupInfo;
|
|
const void * pNext = {};
|
|
uint32_t deviceIndexCount = {};
|
|
const uint32_t * pDeviceIndices = {};
|
|
uint32_t splitInstanceBindRegionCount = {};
|
|
const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo ) == sizeof( VkBindImageMemoryDeviceGroupInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo>::value, "BindImageMemoryDeviceGroupInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBindImageMemoryDeviceGroupInfo>
|
|
{
|
|
using Type = BindImageMemoryDeviceGroupInfo;
|
|
};
|
|
using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo;
|
|
|
|
struct BindImageMemoryInfo
|
|
{
|
|
using NativeType = VkBindImageMemoryInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BindImageMemoryInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: image( image_ ), memory( memory_ ), memoryOffset( memoryOffset_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BindImageMemoryInfo( *reinterpret_cast<BindImageMemoryInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BindImageMemoryInfo & operator=( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BindImageMemoryInfo & operator=( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
image = image_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memory = memory_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memoryOffset = memoryOffset_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBindImageMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBindImageMemoryInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkBindImageMemoryInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBindImageMemoryInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, image, memory, memoryOffset );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BindImageMemoryInfo const & ) const = default;
|
|
#else
|
|
bool operator==( BindImageMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( image == rhs.image )
|
|
&& ( memory == rhs.memory )
|
|
&& ( memoryOffset == rhs.memoryOffset );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BindImageMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Image image = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo>::value, "BindImageMemoryInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBindImageMemoryInfo>
|
|
{
|
|
using Type = BindImageMemoryInfo;
|
|
};
|
|
using BindImageMemoryInfoKHR = BindImageMemoryInfo;
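
  // Usage sketch (illustrative, not part of the generated registry output): the setters
  // return *this, so a BindImageMemoryInfo can be filled by chaining them, and extension
  // structs such as the BindImageMemoryDeviceGroupInfo sketched above are attached via
  // setPNext, mirroring the C-style pNext chain. The image and memory handles are assumed
  // to be application-provided.
  //
  //   VULKAN_HPP_NAMESPACE::BindImageMemoryInfo bindInfo;
  //   bindInfo.setImage( image ).setMemory( memory ).setMemoryOffset( 0 ).setPNext( &deviceGroupInfo );
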
|
|
|
|
struct BindImageMemorySwapchainInfoKHR
|
|
{
|
|
using NativeType = VkBindImageMemorySwapchainInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemorySwapchainInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, uint32_t imageIndex_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: swapchain( swapchain_ ), imageIndex( imageIndex_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BindImageMemorySwapchainInfoKHR( *reinterpret_cast<BindImageMemorySwapchainInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BindImageMemorySwapchainInfoKHR & operator=( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BindImageMemorySwapchainInfoKHR & operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
swapchain = swapchain_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setImageIndex( uint32_t imageIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageIndex = imageIndex_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBindImageMemorySwapchainInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBindImageMemorySwapchainInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkBindImageMemorySwapchainInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBindImageMemorySwapchainInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SwapchainKHR const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, swapchain, imageIndex );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BindImageMemorySwapchainInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( BindImageMemorySwapchainInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( swapchain == rhs.swapchain )
|
|
&& ( imageIndex == rhs.imageIndex );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BindImageMemorySwapchainInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
|
|
uint32_t imageIndex = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR ) == sizeof( VkBindImageMemorySwapchainInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR>::value, "BindImageMemorySwapchainInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBindImageMemorySwapchainInfoKHR>
|
|
{
|
|
using Type = BindImageMemorySwapchainInfoKHR;
|
|
};
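
  // Usage sketch (illustrative, not part of the generated registry output): when an image
  // is bound to swapchain-owned memory, a BindImageMemorySwapchainInfoKHR is chained into
  // the BindImageMemoryInfo and the memory member is left as a null handle, since the
  // backing memory comes from the swapchain. The swapchain handle, image handle and image
  // index are assumed to be application-provided.
  //
  //   VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR swapchainBind;
  //   swapchainBind.setSwapchain( swapchain ).setImageIndex( 0 );
  //
  //   VULKAN_HPP_NAMESPACE::BindImageMemoryInfo bindInfo;
  //   bindInfo.setImage( image ).setPNext( &swapchainBind );  // memory stays {}
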
|
|
|
|
struct BindImagePlaneMemoryInfo
|
|
{
|
|
using NativeType = VkBindImagePlaneMemoryInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImagePlaneMemoryInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo(VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor) VULKAN_HPP_NOEXCEPT
|
|
: planeAspect( planeAspect_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BindImagePlaneMemoryInfo( *reinterpret_cast<BindImagePlaneMemoryInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BindImagePlaneMemoryInfo & operator=( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BindImagePlaneMemoryInfo & operator=( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
planeAspect = planeAspect_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBindImagePlaneMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBindImagePlaneMemoryInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkBindImagePlaneMemoryInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBindImagePlaneMemoryInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageAspectFlagBits const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, planeAspect );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BindImagePlaneMemoryInfo const & ) const = default;
|
|
#else
|
|
bool operator==( BindImagePlaneMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( planeAspect == rhs.planeAspect );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BindImagePlaneMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImagePlaneMemoryInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo>::value, "BindImagePlaneMemoryInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBindImagePlaneMemoryInfo>
|
|
{
|
|
using Type = BindImagePlaneMemoryInfo;
|
|
};
|
|
using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo;
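
  // Usage sketch (illustrative, not part of the generated registry output): for a disjoint
  // multi-planar image, each plane is bound separately by chaining a BindImagePlaneMemoryInfo
  // naming the plane aspect into that plane's BindImageMemoryInfo. The image and the
  // per-plane memory handle are assumed to be application-provided.
  //
  //   VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo planeInfo;
  //   planeInfo.setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::ePlane0 );
  //
  //   VULKAN_HPP_NAMESPACE::BindImageMemoryInfo planeBind;
  //   planeBind.setImage( image ).setMemory( plane0Memory ).setPNext( &planeInfo );
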
|
|
|
|
struct BindIndexBufferIndirectCommandNV
|
|
{
|
|
using NativeType = VkBindIndexBufferIndirectCommandNV;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV(VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16) VULKAN_HPP_NOEXCEPT
|
|
: bufferAddress( bufferAddress_ ), size( size_ ), indexType( indexType_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BindIndexBufferIndirectCommandNV( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BindIndexBufferIndirectCommandNV( *reinterpret_cast<BindIndexBufferIndirectCommandNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BindIndexBufferIndirectCommandNV & operator=( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BindIndexBufferIndirectCommandNV & operator=( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferAddress = bufferAddress_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
size = size_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
indexType = indexType_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBindIndexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBindIndexBufferIndirectCommandNV*>( this );
|
|
}
|
|
|
|
explicit operator VkBindIndexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBindIndexBufferIndirectCommandNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &, uint32_t const &, VULKAN_HPP_NAMESPACE::IndexType const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( bufferAddress, size, indexType );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BindIndexBufferIndirectCommandNV const & ) const = default;
|
|
#else
|
|
bool operator==( BindIndexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( bufferAddress == rhs.bufferAddress )
|
|
&& ( size == rhs.size )
|
|
&& ( indexType == rhs.indexType );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BindIndexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {};
|
|
uint32_t size = {};
|
|
VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV ) == sizeof( VkBindIndexBufferIndirectCommandNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV>::value, "BindIndexBufferIndirectCommandNV is not nothrow_move_constructible!" );
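
  // Usage sketch (illustrative, not part of the generated registry output): a
  // BindIndexBufferIndirectCommandNV token is written into an application-owned stream
  // buffer for VK_NV_device_generated_commands. Both indexBufferAddress (a previously
  // queried buffer device address) and indexBufferSizeInBytes are assumed values here.
  //
  //   VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV bindIndexToken;
  //   bindIndexToken.setBufferAddress( indexBufferAddress )
  //                 .setSize( indexBufferSizeInBytes )
  //                 .setIndexType( VULKAN_HPP_NAMESPACE::IndexType::eUint32 );
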
|
|
|
|
  struct BindShaderGroupIndirectCommandNV
  {
    using NativeType = VkBindShaderGroupIndirectCommandNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV(uint32_t groupIndex_ = {}) VULKAN_HPP_NOEXCEPT
    : groupIndex( groupIndex_ )
    {}

    VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindShaderGroupIndirectCommandNV( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
    : BindShaderGroupIndirectCommandNV( *reinterpret_cast<BindShaderGroupIndirectCommandNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    BindShaderGroupIndirectCommandNV & operator=( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindShaderGroupIndirectCommandNV & operator=( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 BindShaderGroupIndirectCommandNV & setGroupIndex( uint32_t groupIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      groupIndex = groupIndex_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/


    explicit operator VkBindShaderGroupIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindShaderGroupIndirectCommandNV*>( this );
    }

    explicit operator VkBindShaderGroupIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindShaderGroupIndirectCommandNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<uint32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( groupIndex );
    }
#endif


#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( BindShaderGroupIndirectCommandNV const & ) const = default;
#else
    bool operator==( BindShaderGroupIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( groupIndex == rhs.groupIndex );
#endif
    }

    bool operator!=( BindShaderGroupIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t groupIndex = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV ) == sizeof( VkBindShaderGroupIndirectCommandNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV>::value, "BindShaderGroupIndirectCommandNV is not nothrow_move_constructible!" );
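
  // Usage sketch (illustrative, not part of the generated registry output): for
  // VK_NV_device_generated_commands, groupIndex selects one of the shader groups of the
  // currently bound pipeline; the token itself is written into an application-owned stream.
  //
  //   VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV bindGroupToken;
  //   bindGroupToken.setGroupIndex( 1 );
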
struct SparseMemoryBind
|
|
{
|
|
using NativeType = VkSparseMemoryBind;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SparseMemoryBind(VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: resourceOffset( resourceOffset_ ), size( size_ ), memory( memory_ ), memoryOffset( memoryOffset_ ), flags( flags_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SparseMemoryBind( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SparseMemoryBind( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SparseMemoryBind( *reinterpret_cast<SparseMemoryBind const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SparseMemoryBind & operator=( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SparseMemoryBind & operator=( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseMemoryBind const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setResourceOffset( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
resourceOffset = resourceOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
size = size_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memory = memory_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memoryOffset = memoryOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSparseMemoryBind const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSparseMemoryBind*>( this );
|
|
}
|
|
|
|
explicit operator VkSparseMemoryBind &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSparseMemoryBind*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( resourceOffset, size, memory, memoryOffset, flags );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SparseMemoryBind const & ) const = default;
|
|
#else
|
|
bool operator==( SparseMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( resourceOffset == rhs.resourceOffset )
|
|
&& ( size == rhs.size )
|
|
&& ( memory == rhs.memory )
|
|
&& ( memoryOffset == rhs.memoryOffset )
|
|
&& ( flags == rhs.flags );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SparseMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
|
|
VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseMemoryBind ) == sizeof( VkSparseMemoryBind ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseMemoryBind>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseMemoryBind>::value, "SparseMemoryBind is not nothrow_move_constructible!" );
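
  // Usage sketch (illustrative, not part of the generated registry output): a SparseMemoryBind
  // maps a byte range of a sparse resource onto a range of a DeviceMemory allocation; leaving
  // memory as a null handle unbinds the range instead. The memory handle and sizes are assumed
  // to be application-provided.
  //
  //   VULKAN_HPP_NAMESPACE::SparseMemoryBind bind;
  //   bind.setResourceOffset( 0 ).setSize( 65536 ).setMemory( memory ).setMemoryOffset( 0 );
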
|
|
|
|
struct SparseBufferMemoryBindInfo
|
|
{
|
|
using NativeType = VkSparseBufferMemoryBindInfo;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: buffer( buffer_ ), bindCount( bindCount_ ), pBinds( pBinds_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SparseBufferMemoryBindInfo( *reinterpret_cast<SparseBufferMemoryBindInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ )
|
|
: buffer( buffer_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SparseBufferMemoryBindInfo & operator=( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SparseBufferMemoryBindInfo & operator=( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
buffer = buffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bindCount = bindCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pBinds = pBinds_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SparseBufferMemoryBindInfo & setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bindCount = static_cast<uint32_t>( binds_.size() );
|
|
pBinds = binds_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSparseBufferMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSparseBufferMemoryBindInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkSparseBufferMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSparseBufferMemoryBindInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::Buffer const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SparseMemoryBind * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( buffer, bindCount, pBinds );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SparseBufferMemoryBindInfo const & ) const = default;
|
|
#else
|
|
bool operator==( SparseBufferMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( buffer == rhs.buffer )
|
|
&& ( bindCount == rhs.bindCount )
|
|
&& ( pBinds == rhs.pBinds );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SparseBufferMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
|
|
uint32_t bindCount = {};
|
|
const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo>::value, "SparseBufferMemoryBindInfo is not nothrow_move_constructible!" );
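
  // Usage sketch (illustrative, not part of the generated registry output): in enhanced mode
  // the ArrayProxyNoTemporaries constructor and setBinds derive bindCount / pBinds from an
  // existing container; temporaries are rejected, so the container must be a named object
  // that outlives any use of the struct. The buffer handle and the bind element reuse the
  // assumed values from the SparseMemoryBind sketch above.
  //
  //   std::array<VULKAN_HPP_NAMESPACE::SparseMemoryBind, 1> binds = { bind };
  //   VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo bufferBindInfo( buffer, binds );
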
|
|
|
|
struct SparseImageOpaqueMemoryBindInfo
|
|
{
|
|
using NativeType = VkSparseImageOpaqueMemoryBindInfo;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: image( image_ ), bindCount( bindCount_ ), pBinds( pBinds_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SparseImageOpaqueMemoryBindInfo( *reinterpret_cast<SparseImageOpaqueMemoryBindInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ )
|
|
: image( image_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SparseImageOpaqueMemoryBindInfo & operator=( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SparseImageOpaqueMemoryBindInfo & operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
image = image_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bindCount = bindCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pBinds = pBinds_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SparseImageOpaqueMemoryBindInfo & setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bindCount = static_cast<uint32_t>( binds_.size() );
|
|
pBinds = binds_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSparseImageOpaqueMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSparseImageOpaqueMemoryBindInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkSparseImageOpaqueMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSparseImageOpaqueMemoryBindInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::Image const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SparseMemoryBind * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( image, bindCount, pBinds );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SparseImageOpaqueMemoryBindInfo const & ) const = default;
|
|
#else
|
|
bool operator==( SparseImageOpaqueMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( image == rhs.image )
|
|
&& ( bindCount == rhs.bindCount )
|
|
&& ( pBinds == rhs.pBinds );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SparseImageOpaqueMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::Image image = {};
|
|
uint32_t bindCount = {};
|
|
const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo>::value, "SparseImageOpaqueMemoryBindInfo is not nothrow_move_constructible!" );
|
|
|
|
struct ImageSubresource
|
|
{
|
|
using NativeType = VkImageSubresource;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageSubresource(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t mipLevel_ = {}, uint32_t arrayLayer_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: aspectMask( aspectMask_ ), mipLevel( mipLevel_ ), arrayLayer( arrayLayer_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageSubresource( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageSubresource( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageSubresource( *reinterpret_cast<ImageSubresource const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageSubresource & operator=( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageSubresource & operator=( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresource const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
aspectMask = aspectMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
mipLevel = mipLevel_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setArrayLayer( uint32_t arrayLayer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
arrayLayer = arrayLayer_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImageSubresource const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageSubresource*>( this );
|
|
}
|
|
|
|
explicit operator VkImageSubresource &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageSubresource*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( aspectMask, mipLevel, arrayLayer );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageSubresource const & ) const = default;
|
|
#else
|
|
bool operator==( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( aspectMask == rhs.aspectMask )
|
|
&& ( mipLevel == rhs.mipLevel )
|
|
&& ( arrayLayer == rhs.arrayLayer );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
|
|
uint32_t mipLevel = {};
|
|
uint32_t arrayLayer = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresource ) == sizeof( VkImageSubresource ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresource>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresource>::value, "ImageSubresource is not nothrow_move_constructible!" );
|
|
|
|
  struct Offset3D
  {
    using NativeType = VkOffset3D;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR Offset3D(int32_t x_ = {}, int32_t y_ = {}, int32_t z_ = {}) VULKAN_HPP_NOEXCEPT
    : x( x_ ), y( y_ ), z( z_ )
    {}

    VULKAN_HPP_CONSTEXPR Offset3D( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    Offset3D( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
    : Offset3D( *reinterpret_cast<Offset3D const *>( &rhs ) )
    {}

    explicit Offset3D( Offset2D const & offset2D, int32_t z_ = {} )
    : x( offset2D.x )
    , y( offset2D.y )
    , z( z_ )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    Offset3D & operator=( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    Offset3D & operator=( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset3D const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 Offset3D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT
    {
      x = x_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Offset3D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT
    {
      y = y_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Offset3D & setZ( int32_t z_ ) VULKAN_HPP_NOEXCEPT
    {
      z = z_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/


    explicit operator VkOffset3D const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkOffset3D*>( this );
    }

    explicit operator VkOffset3D &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkOffset3D*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<int32_t const &, int32_t const &, int32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( x, y, z );
    }
#endif


#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( Offset3D const & ) const = default;
#else
    bool operator==( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( x == rhs.x )
          && ( y == rhs.y )
          && ( z == rhs.z );
#endif
    }

    bool operator!=( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    int32_t x = {};
    int32_t y = {};
    int32_t z = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) == sizeof( VkOffset3D ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Offset3D>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Offset3D>::value, "Offset3D is not nothrow_move_constructible!" );
struct Extent3D
|
|
{
|
|
using NativeType = VkExtent3D;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR Extent3D(uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: width( width_ ), height( height_ ), depth( depth_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR Extent3D( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
Extent3D( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: Extent3D( *reinterpret_cast<Extent3D const *>( &rhs ) )
|
|
{}
|
|
|
|
explicit Extent3D( Extent2D const & extent2D, uint32_t depth_ = {} )
|
|
: width( extent2D.width )
|
|
, height( extent2D.height )
|
|
, depth( depth_ )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
Extent3D & operator=( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
Extent3D & operator=( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent3D const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 Extent3D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
width = width_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Extent3D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
height = height_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Extent3D & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
depth = depth_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkExtent3D const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExtent3D*>( this );
|
|
}
|
|
|
|
explicit operator VkExtent3D &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExtent3D*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( width, height, depth );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( Extent3D const & ) const = default;
|
|
#else
|
|
bool operator==( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( width == rhs.width )
|
|
&& ( height == rhs.height )
|
|
&& ( depth == rhs.depth );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t width = {};
|
|
uint32_t height = {};
|
|
uint32_t depth = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Extent3D ) == sizeof( VkExtent3D ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Extent3D>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Extent3D>::value, "Extent3D is not nothrow_move_constructible!" );
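
  // Usage sketch (illustrative, not part of the generated registry output): Offset3D and
  // Extent3D both provide explicit converting constructors from their 2D counterparts, which
  // is handy when promoting 2D sizes (for example a swapchain extent) to 3D copy or blit
  // parameters.
  //
  //   VULKAN_HPP_NAMESPACE::Extent2D extent2D( 1920, 1080 );
  //   VULKAN_HPP_NAMESPACE::Extent3D extent3D( extent2D, 1 );  // depth = 1
  //   VULKAN_HPP_NAMESPACE::Offset3D origin( VULKAN_HPP_NAMESPACE::Offset2D( 0, 0 ), 0 );
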
|
|
|
|
struct SparseImageMemoryBind
|
|
{
|
|
using NativeType = VkSparseImageMemoryBind;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SparseImageMemoryBind(VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: subresource( subresource_ ), offset( offset_ ), extent( extent_ ), memory( memory_ ), memoryOffset( memoryOffset_ ), flags( flags_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SparseImageMemoryBind( *reinterpret_cast<SparseImageMemoryBind const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SparseImageMemoryBind & operator=( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SparseImageMemoryBind & operator=( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & subresource_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subresource = subresource_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setOffset( VULKAN_HPP_NAMESPACE::Offset3D const & offset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
offset = offset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
extent = extent_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memory = memory_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memoryOffset = memoryOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSparseImageMemoryBind const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSparseImageMemoryBind*>( this );
|
|
}
|
|
|
|
explicit operator VkSparseImageMemoryBind &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSparseImageMemoryBind*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::ImageSubresource const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::Extent3D const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( subresource, offset, extent, memory, memoryOffset, flags );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SparseImageMemoryBind const & ) const = default;
|
|
#else
|
|
bool operator==( SparseImageMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( subresource == rhs.subresource )
|
|
&& ( offset == rhs.offset )
|
|
&& ( extent == rhs.extent )
|
|
&& ( memory == rhs.memory )
|
|
&& ( memoryOffset == rhs.memoryOffset )
|
|
&& ( flags == rhs.flags );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SparseImageMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::ImageSubresource subresource = {};
|
|
VULKAN_HPP_NAMESPACE::Offset3D offset = {};
|
|
VULKAN_HPP_NAMESPACE::Extent3D extent = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
|
|
VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind>::value, "SparseImageMemoryBind is not nothrow_move_constructible!" );
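
  // Usage sketch (illustrative, not part of the generated registry output): a
  // SparseImageMemoryBind binds one region of a single subresource; the extent is assumed to
  // match the image's sparse tile granularity, and the memory handle to be application-provided.
  //
  //   VULKAN_HPP_NAMESPACE::SparseImageMemoryBind imageBind;
  //   imageBind.setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, 0, 0 ) )
  //            .setOffset( VULKAN_HPP_NAMESPACE::Offset3D( 0, 0, 0 ) )
  //            .setExtent( VULKAN_HPP_NAMESPACE::Extent3D( 128, 128, 1 ) )
  //            .setMemory( memory )
  //            .setMemoryOffset( 0 );
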
|
|
|
|
struct SparseImageMemoryBindInfo
|
|
{
|
|
using NativeType = VkSparseImageMemoryBindInfo;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: image( image_ ), bindCount( bindCount_ ), pBinds( pBinds_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SparseImageMemoryBindInfo( *reinterpret_cast<SparseImageMemoryBindInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind> const & binds_ )
|
|
: image( image_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SparseImageMemoryBindInfo & operator=( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SparseImageMemoryBindInfo & operator=( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
image = image_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bindCount = bindCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pBinds = pBinds_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SparseImageMemoryBindInfo & setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind> const & binds_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bindCount = static_cast<uint32_t>( binds_.size() );
|
|
pBinds = binds_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSparseImageMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSparseImageMemoryBindInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkSparseImageMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSparseImageMemoryBindInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::Image const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( image, bindCount, pBinds );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SparseImageMemoryBindInfo const & ) const = default;
|
|
#else
|
|
bool operator==( SparseImageMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( image == rhs.image )
|
|
&& ( bindCount == rhs.bindCount )
|
|
&& ( pBinds == rhs.pBinds );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SparseImageMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::Image image = {};
|
|
uint32_t bindCount = {};
|
|
const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo>::value, "SparseImageMemoryBindInfo is not nothrow_move_constructible!" );
|
|
|
|
struct BindSparseInfo
|
|
{
|
|
using NativeType = VkBindSparseInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindSparseInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BindSparseInfo(uint32_t waitSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, uint32_t bufferBindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds_ = {}, uint32_t imageOpaqueBindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ = {}, uint32_t imageBindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds_ = {}, uint32_t signalSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: waitSemaphoreCount( waitSemaphoreCount_ ), pWaitSemaphores( pWaitSemaphores_ ), bufferBindCount( bufferBindCount_ ), pBufferBinds( pBufferBinds_ ), imageOpaqueBindCount( imageOpaqueBindCount_ ), pImageOpaqueBinds( pImageOpaqueBinds_ ), imageBindCount( imageBindCount_ ), pImageBinds( pImageBinds_ ), signalSemaphoreCount( signalSemaphoreCount_ ), pSignalSemaphores( pSignalSemaphores_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BindSparseInfo( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BindSparseInfo( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BindSparseInfo( *reinterpret_cast<BindSparseInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
BindSparseInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo> const & bufferBinds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo> const & imageOpaqueBinds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo> const & imageBinds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ = {} )
|
|
: waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) ), pWaitSemaphores( waitSemaphores_.data() ), bufferBindCount( static_cast<uint32_t>( bufferBinds_.size() ) ), pBufferBinds( bufferBinds_.data() ), imageOpaqueBindCount( static_cast<uint32_t>( imageOpaqueBinds_.size() ) ), pImageOpaqueBinds( imageOpaqueBinds_.data() ), imageBindCount( static_cast<uint32_t>( imageBinds_.size() ) ), pImageBinds( imageBinds_.data() ), signalSemaphoreCount( static_cast<uint32_t>( signalSemaphores_.size() ) ), pSignalSemaphores( signalSemaphores_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BindSparseInfo & operator=( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BindSparseInfo & operator=( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindSparseInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
waitSemaphoreCount = waitSemaphoreCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pWaitSemaphores = pWaitSemaphores_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
BindSparseInfo & setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
|
|
pWaitSemaphores = waitSemaphores_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setBufferBindCount( uint32_t bufferBindCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferBindCount = bufferBindCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPBufferBinds( const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pBufferBinds = pBufferBinds_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
BindSparseInfo & setBufferBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo> const & bufferBinds_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferBindCount = static_cast<uint32_t>( bufferBinds_.size() );
|
|
pBufferBinds = bufferBinds_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageOpaqueBindCount = imageOpaqueBindCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPImageOpaqueBinds( const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pImageOpaqueBinds = pImageOpaqueBinds_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
BindSparseInfo & setImageOpaqueBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo> const & imageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageOpaqueBindCount = static_cast<uint32_t>( imageOpaqueBinds_.size() );
|
|
pImageOpaqueBinds = imageOpaqueBinds_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setImageBindCount( uint32_t imageBindCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageBindCount = imageBindCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPImageBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pImageBinds = pImageBinds_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
BindSparseInfo & setImageBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo> const & imageBinds_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageBindCount = static_cast<uint32_t>( imageBinds_.size() );
|
|
pImageBinds = imageBinds_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
signalSemaphoreCount = signalSemaphoreCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSignalSemaphores = pSignalSemaphores_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
BindSparseInfo & setSignalSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
signalSemaphoreCount = static_cast<uint32_t>( signalSemaphores_.size() );
|
|
pSignalSemaphores = signalSemaphores_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBindSparseInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkBindSparseInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBindSparseInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Semaphore * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Semaphore * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, waitSemaphoreCount, pWaitSemaphores, bufferBindCount, pBufferBinds, imageOpaqueBindCount, pImageOpaqueBinds, imageBindCount, pImageBinds, signalSemaphoreCount, pSignalSemaphores );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BindSparseInfo const & ) const = default;
|
|
#else
|
|
bool operator==( BindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( waitSemaphoreCount == rhs.waitSemaphoreCount )
|
|
&& ( pWaitSemaphores == rhs.pWaitSemaphores )
|
|
&& ( bufferBindCount == rhs.bufferBindCount )
|
|
&& ( pBufferBinds == rhs.pBufferBinds )
|
|
&& ( imageOpaqueBindCount == rhs.imageOpaqueBindCount )
|
|
&& ( pImageOpaqueBinds == rhs.pImageOpaqueBinds )
|
|
&& ( imageBindCount == rhs.imageBindCount )
|
|
&& ( pImageBinds == rhs.pImageBinds )
|
|
&& ( signalSemaphoreCount == rhs.signalSemaphoreCount )
|
|
&& ( pSignalSemaphores == rhs.pSignalSemaphores );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindSparseInfo;
|
|
const void * pNext = {};
|
|
uint32_t waitSemaphoreCount = {};
|
|
const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {};
|
|
uint32_t bufferBindCount = {};
|
|
const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds = {};
|
|
uint32_t imageOpaqueBindCount = {};
|
|
const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds = {};
|
|
uint32_t imageBindCount = {};
|
|
const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds = {};
|
|
uint32_t signalSemaphoreCount = {};
|
|
const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindSparseInfo ) == sizeof( VkBindSparseInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindSparseInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindSparseInfo>::value, "BindSparseInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBindSparseInfo>
|
|
{
|
|
using Type = BindSparseInfo;
|
|
};
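  // Hedged usage sketch (illustrative only, not part of the generated API surface):
  // BindSparseInfo describes one batch of sparse binding operations submitted on a
  // sparse-capable queue. Assuming `sparseQueue`, `fence`, `waitSemaphore`, `signalSemaphore`,
  // `sparseBuffer` and `memoryBind` (a vk::SparseMemoryBind) exist elsewhere:
  //
  //   vk::SparseBufferMemoryBindInfo bufferBind( sparseBuffer, memoryBind );
  //   vk::BindSparseInfo bindInfo( waitSemaphore,   // wait before the binds take effect
  //                                bufferBind,      // buffer memory binds
  //                                {}, {},          // no opaque/image binds in this sketch
  //                                signalSemaphore );
  //   sparseQueue.bindSparse( bindInfo, fence );    // wraps vkQueueBindSparse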
|
|
|
|
struct BindVertexBufferIndirectCommandNV
|
|
{
|
|
using NativeType = VkBindVertexBufferIndirectCommandNV;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV(VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, uint32_t stride_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: bufferAddress( bufferAddress_ ), size( size_ ), stride( stride_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BindVertexBufferIndirectCommandNV( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BindVertexBufferIndirectCommandNV( *reinterpret_cast<BindVertexBufferIndirectCommandNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BindVertexBufferIndirectCommandNV & operator=( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BindVertexBufferIndirectCommandNV & operator=( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferAddress = bufferAddress_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
size = size_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stride = stride_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBindVertexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBindVertexBufferIndirectCommandNV*>( this );
|
|
}
|
|
|
|
explicit operator VkBindVertexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBindVertexBufferIndirectCommandNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &, uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( bufferAddress, size, stride );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BindVertexBufferIndirectCommandNV const & ) const = default;
|
|
#else
|
|
bool operator==( BindVertexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( bufferAddress == rhs.bufferAddress )
|
|
&& ( size == rhs.size )
|
|
&& ( stride == rhs.stride );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BindVertexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {};
|
|
uint32_t size = {};
|
|
uint32_t stride = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV ) == sizeof( VkBindVertexBufferIndirectCommandNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV>::value, "BindVertexBufferIndirectCommandNV is not nothrow_move_constructible!" );
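  // Illustrative sketch (not part of the generated header): BindVertexBufferIndirectCommandNV
  // is the token payload that VK_NV_device_generated_commands reads from an indirect commands
  // buffer to bind a vertex buffer. The record is typically written by the application (or a
  // shader) into that buffer; `vertexBuffer`, `vertexDataSize` and `Vertex` are assumed here:
  //
  //   vk::BindVertexBufferIndirectCommandNV token;
  //   token.setBufferAddress( device.getBufferAddress( vk::BufferDeviceAddressInfo( vertexBuffer ) ) )
  //        .setSize( vertexDataSize )
  //        .setStride( sizeof( Vertex ) );
  //   // `token` is then copied into the device-generated commands stream at the layout's offset.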
|
|
|
|
struct ImageSubresourceLayers
|
|
{
|
|
using NativeType = VkImageSubresourceLayers;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageSubresourceLayers(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t mipLevel_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: aspectMask( aspectMask_ ), mipLevel( mipLevel_ ), baseArrayLayer( baseArrayLayer_ ), layerCount( layerCount_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageSubresourceLayers( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageSubresourceLayers( *reinterpret_cast<ImageSubresourceLayers const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageSubresourceLayers & operator=( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageSubresourceLayers & operator=( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
aspectMask = aspectMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
mipLevel = mipLevel_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
baseArrayLayer = baseArrayLayer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
layerCount = layerCount_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImageSubresourceLayers const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageSubresourceLayers*>( this );
|
|
}
|
|
|
|
explicit operator VkImageSubresourceLayers &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageSubresourceLayers*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, uint32_t const &, uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( aspectMask, mipLevel, baseArrayLayer, layerCount );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageSubresourceLayers const & ) const = default;
|
|
#else
|
|
bool operator==( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( aspectMask == rhs.aspectMask )
|
|
&& ( mipLevel == rhs.mipLevel )
|
|
&& ( baseArrayLayer == rhs.baseArrayLayer )
|
|
&& ( layerCount == rhs.layerCount );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
|
|
uint32_t mipLevel = {};
|
|
uint32_t baseArrayLayer = {};
|
|
uint32_t layerCount = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers>::value, "ImageSubresourceLayers is not nothrow_move_constructible!" );
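  // Brief usage sketch (illustrative only): ImageSubresourceLayers selects the aspect, a single
  // mip level and a contiguous range of array layers for transfer-style commands (copies, blits,
  // resolves). For example, the first mip of a single-layer color image:
  //
  //   vk::ImageSubresourceLayers colorLayer( vk::ImageAspectFlagBits::eColor,
  //                                          /*mipLevel*/ 0,
  //                                          /*baseArrayLayer*/ 0,
  //                                          /*layerCount*/ 1 );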
|
|
|
|
struct ImageBlit2
|
|
{
|
|
using NativeType = VkImageBlit2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageBlit2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageBlit2(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & srcOffsets_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & dstOffsets_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: srcSubresource( srcSubresource_ ), srcOffsets( srcOffsets_ ), dstSubresource( dstSubresource_ ), dstOffsets( dstOffsets_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageBlit2( ImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageBlit2( VkImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageBlit2( *reinterpret_cast<ImageBlit2 const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageBlit2 & operator=( ImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageBlit2 & operator=( VkImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageBlit2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcSubresource = srcSubresource_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcOffsets = srcOffsets_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstSubresource = dstSubresource_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstOffsets = dstOffsets_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImageBlit2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageBlit2*>( this );
|
|
}
|
|
|
|
explicit operator VkImageBlit2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageBlit2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcSubresource, srcOffsets, dstSubresource, dstOffsets );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageBlit2 const & ) const = default;
|
|
#else
|
|
bool operator==( ImageBlit2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( srcSubresource == rhs.srcSubresource )
|
|
&& ( srcOffsets == rhs.srcOffsets )
|
|
&& ( dstSubresource == rhs.dstSubresource )
|
|
&& ( dstOffsets == rhs.dstOffsets );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageBlit2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageBlit2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> srcOffsets = {};
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> dstOffsets = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageBlit2 ) == sizeof( VkImageBlit2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageBlit2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageBlit2>::value, "ImageBlit2 is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageBlit2>
|
|
{
|
|
using Type = ImageBlit2;
|
|
};
|
|
using ImageBlit2KHR = ImageBlit2;
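  // Usage sketch (illustrative only): an ImageBlit2 region describes one blit rectangle; the two
  // entries of srcOffsets/dstOffsets are opposite corners of the source and destination boxes.
  // Assuming `srcWidth`, `srcHeight`, `dstWidth` and `dstHeight` are known int32_t values:
  //
  //   vk::ImageBlit2 region;
  //   region.srcSubresource = vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 );
  //   region.dstSubresource = region.srcSubresource;
  //   region.srcOffsets[1]  = vk::Offset3D( srcWidth, srcHeight, 1 );   // [0] stays at (0,0,0)
  //   region.dstOffsets[1]  = vk::Offset3D( dstWidth, dstHeight, 1 );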
|
|
|
|
struct BlitImageInfo2
|
|
{
|
|
using NativeType = VkBlitImageInfo2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBlitImageInfo2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions_ = {}, VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest) VULKAN_HPP_NOEXCEPT
|
|
: srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ ), filter( filter_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2( BlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BlitImageInfo2( VkBlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BlitImageInfo2( *reinterpret_cast<BlitImageInfo2 const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
BlitImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageBlit2> const & regions_, VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest )
|
|
: srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() ), filter( filter_ )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BlitImageInfo2 & operator=( BlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BlitImageInfo2 & operator=( VkBlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BlitImageInfo2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcImage = srcImage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcImageLayout = srcImageLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstImage = dstImage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstImageLayout = dstImageLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
regionCount = regionCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pRegions = pRegions_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
BlitImageInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageBlit2> const & regions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
regionCount = static_cast<uint32_t>( regions_.size() );
|
|
pRegions = regions_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setFilter( VULKAN_HPP_NAMESPACE::Filter filter_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
filter = filter_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBlitImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBlitImageInfo2*>( this );
|
|
}
|
|
|
|
explicit operator VkBlitImageInfo2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBlitImageInfo2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ImageBlit2 * const &, VULKAN_HPP_NAMESPACE::Filter const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BlitImageInfo2 const & ) const = default;
|
|
#else
|
|
bool operator==( BlitImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( srcImage == rhs.srcImage )
|
|
&& ( srcImageLayout == rhs.srcImageLayout )
|
|
&& ( dstImage == rhs.dstImage )
|
|
&& ( dstImageLayout == rhs.dstImageLayout )
|
|
&& ( regionCount == rhs.regionCount )
|
|
&& ( pRegions == rhs.pRegions )
|
|
&& ( filter == rhs.filter );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BlitImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBlitImageInfo2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Image srcImage = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::Image dstImage = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
uint32_t regionCount = {};
|
|
const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions = {};
|
|
VULKAN_HPP_NAMESPACE::Filter filter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BlitImageInfo2 ) == sizeof( VkBlitImageInfo2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BlitImageInfo2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BlitImageInfo2>::value, "BlitImageInfo2 is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBlitImageInfo2>
|
|
{
|
|
using Type = BlitImageInfo2;
|
|
};
|
|
using BlitImageInfo2KHR = BlitImageInfo2;
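  // Usage sketch (illustrative only): BlitImageInfo2 bundles everything vkCmdBlitImage2 needs.
  // Assuming `commandBuffer` is recording, `srcImage`/`dstImage` are already in the transfer
  // layouts, `region` is a vk::ImageBlit2 as sketched above, and Vulkan 1.3 (or
  // VK_KHR_copy_commands2 via blitImage2KHR) is available:
  //
  //   vk::BlitImageInfo2 blitInfo( srcImage, vk::ImageLayout::eTransferSrcOptimal,
  //                                dstImage, vk::ImageLayout::eTransferDstOptimal,
  //                                region, vk::Filter::eLinear );
  //   commandBuffer.blitImage2( blitInfo );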
|
|
|
|
#if defined( VK_USE_PLATFORM_FUCHSIA )
|
|
struct BufferCollectionBufferCreateInfoFUCHSIA
|
|
{
|
|
using NativeType = VkBufferCollectionBufferCreateInfoFUCHSIA;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionBufferCreateInfoFUCHSIA;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BufferCollectionBufferCreateInfoFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, uint32_t index_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: collection( collection_ ), index( index_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BufferCollectionBufferCreateInfoFUCHSIA( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferCollectionBufferCreateInfoFUCHSIA( VkBufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BufferCollectionBufferCreateInfoFUCHSIA( *reinterpret_cast<BufferCollectionBufferCreateInfoFUCHSIA const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BufferCollectionBufferCreateInfoFUCHSIA & operator=( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferCollectionBufferCreateInfoFUCHSIA & operator=( VkBufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
collection = collection_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
index = index_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBufferCollectionBufferCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBufferCollectionBufferCreateInfoFUCHSIA*>( this );
|
|
}
|
|
|
|
explicit operator VkBufferCollectionBufferCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBufferCollectionBufferCreateInfoFUCHSIA*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, collection, index );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BufferCollectionBufferCreateInfoFUCHSIA const & ) const = default;
|
|
#else
|
|
bool operator==( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( collection == rhs.collection )
|
|
&& ( index == rhs.index );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionBufferCreateInfoFUCHSIA;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {};
|
|
uint32_t index = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA ) == sizeof( VkBufferCollectionBufferCreateInfoFUCHSIA ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA>::value, "BufferCollectionBufferCreateInfoFUCHSIA is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBufferCollectionBufferCreateInfoFUCHSIA>
|
|
{
|
|
using Type = BufferCollectionBufferCreateInfoFUCHSIA;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_FUCHSIA*/
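  // Hedged sketch (illustrative only, Fuchsia only): BufferCollectionBufferCreateInfoFUCHSIA is
  // chained into BufferCreateInfo::pNext so that the new buffer is backed by entry `index` of a
  // negotiated sysmem buffer collection. `collection`, `bufferSize` and `device` are assumed to
  // exist elsewhere:
  //
  //   vk::BufferCollectionBufferCreateInfoFUCHSIA collectionInfo( collection, /*index*/ 0 );
  //   vk::BufferCreateInfo bufferInfo( {}, bufferSize, vk::BufferUsageFlagBits::eVertexBuffer );
  //   bufferInfo.pNext = &collectionInfo;
  //   vk::Buffer buffer = device.createBuffer( bufferInfo );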
|
|
|
|
#if defined( VK_USE_PLATFORM_FUCHSIA )
|
|
struct BufferCollectionConstraintsInfoFUCHSIA
|
|
{
|
|
using NativeType = VkBufferCollectionConstraintsInfoFUCHSIA;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionConstraintsInfoFUCHSIA;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BufferCollectionConstraintsInfoFUCHSIA(uint32_t minBufferCount_ = {}, uint32_t maxBufferCount_ = {}, uint32_t minBufferCountForCamping_ = {}, uint32_t minBufferCountForDedicatedSlack_ = {}, uint32_t minBufferCountForSharedSlack_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: minBufferCount( minBufferCount_ ), maxBufferCount( maxBufferCount_ ), minBufferCountForCamping( minBufferCountForCamping_ ), minBufferCountForDedicatedSlack( minBufferCountForDedicatedSlack_ ), minBufferCountForSharedSlack( minBufferCountForSharedSlack_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BufferCollectionConstraintsInfoFUCHSIA( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferCollectionConstraintsInfoFUCHSIA( VkBufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BufferCollectionConstraintsInfoFUCHSIA( *reinterpret_cast<BufferCollectionConstraintsInfoFUCHSIA const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BufferCollectionConstraintsInfoFUCHSIA & operator=( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferCollectionConstraintsInfoFUCHSIA & operator=( VkBufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCount( uint32_t minBufferCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
minBufferCount = minBufferCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMaxBufferCount( uint32_t maxBufferCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxBufferCount = maxBufferCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCountForCamping( uint32_t minBufferCountForCamping_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
minBufferCountForCamping = minBufferCountForCamping_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCountForDedicatedSlack( uint32_t minBufferCountForDedicatedSlack_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
minBufferCountForDedicatedSlack = minBufferCountForDedicatedSlack_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCountForSharedSlack( uint32_t minBufferCountForSharedSlack_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
minBufferCountForSharedSlack = minBufferCountForSharedSlack_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBufferCollectionConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBufferCollectionConstraintsInfoFUCHSIA*>( this );
|
|
}
|
|
|
|
explicit operator VkBufferCollectionConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBufferCollectionConstraintsInfoFUCHSIA*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, minBufferCount, maxBufferCount, minBufferCountForCamping, minBufferCountForDedicatedSlack, minBufferCountForSharedSlack );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BufferCollectionConstraintsInfoFUCHSIA const & ) const = default;
|
|
#else
|
|
bool operator==( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( minBufferCount == rhs.minBufferCount )
|
|
&& ( maxBufferCount == rhs.maxBufferCount )
|
|
&& ( minBufferCountForCamping == rhs.minBufferCountForCamping )
|
|
&& ( minBufferCountForDedicatedSlack == rhs.minBufferCountForDedicatedSlack )
|
|
&& ( minBufferCountForSharedSlack == rhs.minBufferCountForSharedSlack );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionConstraintsInfoFUCHSIA;
|
|
const void * pNext = {};
|
|
uint32_t minBufferCount = {};
|
|
uint32_t maxBufferCount = {};
|
|
uint32_t minBufferCountForCamping = {};
|
|
uint32_t minBufferCountForDedicatedSlack = {};
|
|
uint32_t minBufferCountForSharedSlack = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA ) == sizeof( VkBufferCollectionConstraintsInfoFUCHSIA ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA>::value, "BufferCollectionConstraintsInfoFUCHSIA is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBufferCollectionConstraintsInfoFUCHSIA>
|
|
{
|
|
using Type = BufferCollectionConstraintsInfoFUCHSIA;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_FUCHSIA*/
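  // Hedged sketch (illustrative only, Fuchsia only): BufferCollectionConstraintsInfoFUCHSIA
  // expresses how many buffers the application needs from a sysmem collection; the assumption
  // here is that it is consumed as the bufferCollectionConstraints member of
  // vk::BufferConstraintsInfoFUCHSIA / vk::ImageConstraintsInfoFUCHSIA (see the extension spec):
  //
  //   vk::BufferCollectionConstraintsInfoFUCHSIA countConstraints;
  //   countConstraints.setMinBufferCount( 2 )    // at least double-buffered
  //                   .setMaxBufferCount( 8 );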
|
|
|
|
#if defined( VK_USE_PLATFORM_FUCHSIA )
|
|
struct BufferCollectionCreateInfoFUCHSIA
|
|
{
|
|
using NativeType = VkBufferCollectionCreateInfoFUCHSIA;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionCreateInfoFUCHSIA;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BufferCollectionCreateInfoFUCHSIA(zx_handle_t collectionToken_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: collectionToken( collectionToken_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BufferCollectionCreateInfoFUCHSIA( BufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferCollectionCreateInfoFUCHSIA( VkBufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BufferCollectionCreateInfoFUCHSIA( *reinterpret_cast<BufferCollectionCreateInfoFUCHSIA const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BufferCollectionCreateInfoFUCHSIA & operator=( BufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferCollectionCreateInfoFUCHSIA & operator=( VkBufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCollectionCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCollectionCreateInfoFUCHSIA & setCollectionToken( zx_handle_t collectionToken_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
collectionToken = collectionToken_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBufferCollectionCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA*>( this );
|
|
}
|
|
|
|
explicit operator VkBufferCollectionCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBufferCollectionCreateInfoFUCHSIA*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, zx_handle_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, collectionToken );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
std::strong_ordering operator<=>( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
|
|
if ( auto cmp = memcmp( &collectionToken, &rhs.collectionToken, sizeof( zx_handle_t ) ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( memcmp( &collectionToken, &rhs.collectionToken, sizeof( zx_handle_t ) ) == 0 );
|
|
}
|
|
|
|
bool operator!=( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionCreateInfoFUCHSIA;
|
|
const void * pNext = {};
|
|
zx_handle_t collectionToken = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA ) == sizeof( VkBufferCollectionCreateInfoFUCHSIA ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA>::value, "BufferCollectionCreateInfoFUCHSIA is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBufferCollectionCreateInfoFUCHSIA>
|
|
{
|
|
using Type = BufferCollectionCreateInfoFUCHSIA;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_FUCHSIA*/
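  // Hedged sketch (illustrative only, Fuchsia only): a buffer collection is created from a
  // sysmem collection token. Assuming `collectionToken` is a valid zx_handle_t obtained from
  // sysmem and the VK_FUCHSIA_buffer_collection entry points are available:
  //
  //   vk::BufferCollectionCreateInfoFUCHSIA createInfo( collectionToken );
  //   vk::BufferCollectionFUCHSIA collection = device.createBufferCollectionFUCHSIA( createInfo );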
|
|
|
|
#if defined( VK_USE_PLATFORM_FUCHSIA )
|
|
struct BufferCollectionImageCreateInfoFUCHSIA
|
|
{
|
|
using NativeType = VkBufferCollectionImageCreateInfoFUCHSIA;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionImageCreateInfoFUCHSIA;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BufferCollectionImageCreateInfoFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, uint32_t index_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: collection( collection_ ), index( index_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BufferCollectionImageCreateInfoFUCHSIA( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferCollectionImageCreateInfoFUCHSIA( VkBufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BufferCollectionImageCreateInfoFUCHSIA( *reinterpret_cast<BufferCollectionImageCreateInfoFUCHSIA const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BufferCollectionImageCreateInfoFUCHSIA & operator=( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferCollectionImageCreateInfoFUCHSIA & operator=( VkBufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
collection = collection_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
index = index_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBufferCollectionImageCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBufferCollectionImageCreateInfoFUCHSIA*>( this );
|
|
}
|
|
|
|
explicit operator VkBufferCollectionImageCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBufferCollectionImageCreateInfoFUCHSIA*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, collection, index );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BufferCollectionImageCreateInfoFUCHSIA const & ) const = default;
|
|
#else
|
|
bool operator==( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( collection == rhs.collection )
|
|
&& ( index == rhs.index );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionImageCreateInfoFUCHSIA;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {};
|
|
uint32_t index = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA ) == sizeof( VkBufferCollectionImageCreateInfoFUCHSIA ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA>::value, "BufferCollectionImageCreateInfoFUCHSIA is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBufferCollectionImageCreateInfoFUCHSIA>
|
|
{
|
|
using Type = BufferCollectionImageCreateInfoFUCHSIA;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_FUCHSIA*/
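  // Hedged sketch (illustrative only, Fuchsia only): the image counterpart of
  // BufferCollectionBufferCreateInfoFUCHSIA; chain it into ImageCreateInfo::pNext so the image
  // is bound to entry `index` of the collection. `collection` and `imageInfo` (a fully filled
  // vk::ImageCreateInfo) are assumed to exist elsewhere:
  //
  //   vk::BufferCollectionImageCreateInfoFUCHSIA collectionImageInfo( collection, /*index*/ 0 );
  //   imageInfo.pNext = &collectionImageInfo;
  //   vk::Image image = device.createImage( imageInfo );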
|
|
|
|
#if defined( VK_USE_PLATFORM_FUCHSIA )
|
|
struct SysmemColorSpaceFUCHSIA
|
|
{
|
|
using NativeType = VkSysmemColorSpaceFUCHSIA;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSysmemColorSpaceFUCHSIA;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SysmemColorSpaceFUCHSIA(uint32_t colorSpace_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: colorSpace( colorSpace_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SysmemColorSpaceFUCHSIA( SysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SysmemColorSpaceFUCHSIA( VkSysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SysmemColorSpaceFUCHSIA( *reinterpret_cast<SysmemColorSpaceFUCHSIA const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SysmemColorSpaceFUCHSIA & operator=( SysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SysmemColorSpaceFUCHSIA & operator=( VkSysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SysmemColorSpaceFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SysmemColorSpaceFUCHSIA & setColorSpace( uint32_t colorSpace_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorSpace = colorSpace_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSysmemColorSpaceFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSysmemColorSpaceFUCHSIA*>( this );
|
|
}
|
|
|
|
explicit operator VkSysmemColorSpaceFUCHSIA &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSysmemColorSpaceFUCHSIA*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, colorSpace );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SysmemColorSpaceFUCHSIA const & ) const = default;
|
|
#else
|
|
bool operator==( SysmemColorSpaceFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( colorSpace == rhs.colorSpace );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SysmemColorSpaceFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSysmemColorSpaceFUCHSIA;
|
|
const void * pNext = {};
|
|
uint32_t colorSpace = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA ) == sizeof( VkSysmemColorSpaceFUCHSIA ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA>::value, "SysmemColorSpaceFUCHSIA is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSysmemColorSpaceFUCHSIA>
|
|
{
|
|
using Type = SysmemColorSpaceFUCHSIA;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_FUCHSIA*/
|
|
|
|
#if defined( VK_USE_PLATFORM_FUCHSIA )
|
|
struct BufferCollectionPropertiesFUCHSIA
|
|
{
|
|
using NativeType = VkBufferCollectionPropertiesFUCHSIA;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionPropertiesFUCHSIA;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BufferCollectionPropertiesFUCHSIA(uint32_t memoryTypeBits_ = {}, uint32_t bufferCount_ = {}, uint32_t createInfoIndex_ = {}, uint64_t sysmemPixelFormat_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {}, VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA sysmemColorSpaceIndex_ = {}, VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven) VULKAN_HPP_NOEXCEPT
|
|
: memoryTypeBits( memoryTypeBits_ ), bufferCount( bufferCount_ ), createInfoIndex( createInfoIndex_ ), sysmemPixelFormat( sysmemPixelFormat_ ), formatFeatures( formatFeatures_ ), sysmemColorSpaceIndex( sysmemColorSpaceIndex_ ), samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ ), suggestedYcbcrModel( suggestedYcbcrModel_ ), suggestedYcbcrRange( suggestedYcbcrRange_ ), suggestedXChromaOffset( suggestedXChromaOffset_ ), suggestedYChromaOffset( suggestedYChromaOffset_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BufferCollectionPropertiesFUCHSIA( BufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferCollectionPropertiesFUCHSIA( VkBufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BufferCollectionPropertiesFUCHSIA( *reinterpret_cast<BufferCollectionPropertiesFUCHSIA const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BufferCollectionPropertiesFUCHSIA & operator=( BufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferCollectionPropertiesFUCHSIA & operator=( VkBufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setMemoryTypeBits( uint32_t memoryTypeBits_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memoryTypeBits = memoryTypeBits_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setBufferCount( uint32_t bufferCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferCount = bufferCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setCreateInfoIndex( uint32_t createInfoIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
createInfoIndex = createInfoIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSysmemPixelFormat( uint64_t sysmemPixelFormat_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sysmemPixelFormat = sysmemPixelFormat_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
formatFeatures = formatFeatures_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSysmemColorSpaceIndex( VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA const & sysmemColorSpaceIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sysmemColorSpaceIndex = sysmemColorSpaceIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSamplerYcbcrConversionComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & samplerYcbcrConversionComponents_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
samplerYcbcrConversionComponents = samplerYcbcrConversionComponents_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSuggestedYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
suggestedYcbcrModel = suggestedYcbcrModel_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSuggestedYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
suggestedYcbcrRange = suggestedYcbcrRange_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSuggestedXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
suggestedXChromaOffset = suggestedXChromaOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSuggestedYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
suggestedYChromaOffset = suggestedYChromaOffset_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBufferCollectionPropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBufferCollectionPropertiesFUCHSIA*>( this );
|
|
}
|
|
|
|
explicit operator VkBufferCollectionPropertiesFUCHSIA &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint64_t const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &, VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA const &, VULKAN_HPP_NAMESPACE::ComponentMapping const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, memoryTypeBits, bufferCount, createInfoIndex, sysmemPixelFormat, formatFeatures, sysmemColorSpaceIndex, samplerYcbcrConversionComponents, suggestedYcbcrModel, suggestedYcbcrRange, suggestedXChromaOffset, suggestedYChromaOffset );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BufferCollectionPropertiesFUCHSIA const & ) const = default;
|
|
#else
|
|
bool operator==( BufferCollectionPropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( memoryTypeBits == rhs.memoryTypeBits )
|
|
&& ( bufferCount == rhs.bufferCount )
|
|
&& ( createInfoIndex == rhs.createInfoIndex )
|
|
&& ( sysmemPixelFormat == rhs.sysmemPixelFormat )
|
|
&& ( formatFeatures == rhs.formatFeatures )
|
|
&& ( sysmemColorSpaceIndex == rhs.sysmemColorSpaceIndex )
|
|
&& ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents )
|
|
&& ( suggestedYcbcrModel == rhs.suggestedYcbcrModel )
|
|
&& ( suggestedYcbcrRange == rhs.suggestedYcbcrRange )
|
|
&& ( suggestedXChromaOffset == rhs.suggestedXChromaOffset )
|
|
&& ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BufferCollectionPropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionPropertiesFUCHSIA;
|
|
void * pNext = {};
|
|
uint32_t memoryTypeBits = {};
|
|
uint32_t bufferCount = {};
|
|
uint32_t createInfoIndex = {};
|
|
uint64_t sysmemPixelFormat = {};
|
|
VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {};
|
|
VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA sysmemColorSpaceIndex = {};
|
|
VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {};
|
|
VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
|
|
VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
|
|
VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
|
|
VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA ) == sizeof( VkBufferCollectionPropertiesFUCHSIA ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>::value, "BufferCollectionPropertiesFUCHSIA is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBufferCollectionPropertiesFUCHSIA>
|
|
{
|
|
using Type = BufferCollectionPropertiesFUCHSIA;
|
|
};
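// Note: a minimal usage sketch (non-normative, hand-written), not part of the generated wrapper.
// BufferCollectionPropertiesFUCHSIA is an output structure; it is assumed here that a valid
// VULKAN_HPP_NAMESPACE::Device `device` and VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA `collection`
// exist and that the default (exception-throwing) configuration is in use:
//
//   VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA properties =
//     device.getBufferCollectionPropertiesFUCHSIA( collection );
//   uint32_t memoryTypeBits = properties.memoryTypeBits;  // memory types usable for the collection's buffers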
#endif /*VK_USE_PLATFORM_FUCHSIA*/
struct BufferCreateInfo
|
|
{
|
|
using NativeType = VkBufferCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BufferCreateInfo(VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t * pQueueFamilyIndices_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), size( size_ ), usage( usage_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( queueFamilyIndexCount_ ), pQueueFamilyIndices( pQueueFamilyIndices_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BufferCreateInfo( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferCreateInfo( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BufferCreateInfo( *reinterpret_cast<BufferCreateInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_, VULKAN_HPP_NAMESPACE::DeviceSize size_, VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ )
|
|
: flags( flags_ ), size( size_ ), usage( usage_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BufferCreateInfo & operator=( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferCreateInfo & operator=( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
size = size_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
usage = usage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sharingMode = sharingMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueFamilyIndexCount = queueFamilyIndexCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pQueueFamilyIndices = pQueueFamilyIndices_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
BufferCreateInfo & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
|
|
pQueueFamilyIndices = queueFamilyIndices_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBufferCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBufferCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferCreateFlags const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::BufferUsageFlags const &, VULKAN_HPP_NAMESPACE::SharingMode const &, uint32_t const &, const uint32_t * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, size, usage, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BufferCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( BufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( size == rhs.size )
|
|
&& ( usage == rhs.usage )
|
|
&& ( sharingMode == rhs.sharingMode )
|
|
&& ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
|
|
&& ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
|
|
VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {};
|
|
VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
|
|
uint32_t queueFamilyIndexCount = {};
|
|
const uint32_t * pQueueFamilyIndices = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCreateInfo>::value, "BufferCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBufferCreateInfo>
|
|
{
|
|
using Type = BufferCreateInfo;
|
|
};
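// Note: a minimal usage sketch (non-normative, hand-written), not part of the generated wrapper.
// It assumes a valid VULKAN_HPP_NAMESPACE::Device `device`; the size and usage flags are arbitrary
// placeholders, and the default (exception-throwing) configuration is assumed:
//
//   VULKAN_HPP_NAMESPACE::BufferCreateInfo createInfo( {},     // flags
//                                                      65536,  // size in bytes
//                                                      VULKAN_HPP_NAMESPACE::BufferUsageFlagBits::eTransferDst |
//                                                        VULKAN_HPP_NAMESPACE::BufferUsageFlagBits::eVertexBuffer );
//   VULKAN_HPP_NAMESPACE::Buffer buffer = device.createBuffer( createInfo );  // sharingMode defaults to eExclusive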
#if defined( VK_USE_PLATFORM_FUCHSIA )
|
|
struct BufferConstraintsInfoFUCHSIA
|
|
{
|
|
using NativeType = VkBufferConstraintsInfoFUCHSIA;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferConstraintsInfoFUCHSIA;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BufferConstraintsInfoFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCreateInfo createInfo_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ = {}, VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: createInfo( createInfo_ ), requiredFormatFeatures( requiredFormatFeatures_ ), bufferCollectionConstraints( bufferCollectionConstraints_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BufferConstraintsInfoFUCHSIA( BufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferConstraintsInfoFUCHSIA( VkBufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BufferConstraintsInfoFUCHSIA( *reinterpret_cast<BufferConstraintsInfoFUCHSIA const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BufferConstraintsInfoFUCHSIA & operator=( BufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferConstraintsInfoFUCHSIA & operator=( VkBufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
createInfo = createInfo_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setRequiredFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
requiredFormatFeatures = requiredFormatFeatures_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setBufferCollectionConstraints( VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraints_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferCollectionConstraints = bufferCollectionConstraints_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBufferConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA*>( this );
|
|
}
|
|
|
|
explicit operator VkBufferConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBufferConstraintsInfoFUCHSIA*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferCreateInfo const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &, VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, createInfo, requiredFormatFeatures, bufferCollectionConstraints );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BufferConstraintsInfoFUCHSIA const & ) const = default;
|
|
#else
|
|
bool operator==( BufferConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( createInfo == rhs.createInfo )
|
|
&& ( requiredFormatFeatures == rhs.requiredFormatFeatures )
|
|
&& ( bufferCollectionConstraints == rhs.bufferCollectionConstraints );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BufferConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferConstraintsInfoFUCHSIA;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::BufferCreateInfo createInfo = {};
|
|
VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures = {};
|
|
VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA ) == sizeof( VkBufferConstraintsInfoFUCHSIA ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA>::value, "BufferConstraintsInfoFUCHSIA is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBufferConstraintsInfoFUCHSIA>
|
|
{
|
|
using Type = BufferConstraintsInfoFUCHSIA;
|
|
};
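// Note: a minimal usage sketch (non-normative, hand-written), not part of the generated wrapper.
// It assumes a valid VULKAN_HPP_NAMESPACE::Device `device` and a
// VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA `collection`; the create info and format features
// below are arbitrary placeholders:
//
//   VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA constraintsInfo;
//   constraintsInfo.setCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateInfo(
//                       {}, 65536, VULKAN_HPP_NAMESPACE::BufferUsageFlagBits::eVertexBuffer ) )
//                  .setRequiredFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlagBits::eVertexBuffer );
//   device.setBufferCollectionBufferConstraintsFUCHSIA( collection, constraintsInfo );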
#endif /*VK_USE_PLATFORM_FUCHSIA*/
struct BufferCopy
|
|
{
|
|
using NativeType = VkBufferCopy;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BufferCopy(VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: srcOffset( srcOffset_ ), dstOffset( dstOffset_ ), size( size_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BufferCopy( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferCopy( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BufferCopy( *reinterpret_cast<BufferCopy const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BufferCopy & operator=( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferCopy & operator=( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcOffset = srcOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCopy & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstOffset = dstOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCopy & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
size = size_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBufferCopy const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBufferCopy*>( this );
|
|
}
|
|
|
|
explicit operator VkBufferCopy &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBufferCopy*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( srcOffset, dstOffset, size );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BufferCopy const & ) const = default;
|
|
#else
|
|
bool operator==( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( srcOffset == rhs.srcOffset )
|
|
&& ( dstOffset == rhs.dstOffset )
|
|
&& ( size == rhs.size );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCopy ) == sizeof( VkBufferCopy ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCopy>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCopy>::value, "BufferCopy is not nothrow_move_constructible!" );
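// Note: a minimal usage sketch (non-normative, hand-written), not part of the generated wrapper.
// It assumes a command buffer `cmd` in the recording state and two suitably created buffers
// `src` and `dst`; offsets and size are arbitrary placeholders:
//
//   VULKAN_HPP_NAMESPACE::BufferCopy region( 0 /*srcOffset*/, 0 /*dstOffset*/, 256 /*size*/ );
//   cmd.copyBuffer( src, dst, region );  // enhanced-mode overload taking an ArrayProxy of regions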
struct BufferCopy2
|
|
{
|
|
using NativeType = VkBufferCopy2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCopy2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BufferCopy2(VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: srcOffset( srcOffset_ ), dstOffset( dstOffset_ ), size( size_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BufferCopy2( BufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferCopy2( VkBufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BufferCopy2( *reinterpret_cast<BufferCopy2 const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BufferCopy2 & operator=( BufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferCopy2 & operator=( VkBufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcOffset = srcOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstOffset = dstOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
size = size_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBufferCopy2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBufferCopy2*>( this );
|
|
}
|
|
|
|
explicit operator VkBufferCopy2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBufferCopy2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcOffset, dstOffset, size );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BufferCopy2 const & ) const = default;
|
|
#else
|
|
bool operator==( BufferCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( srcOffset == rhs.srcOffset )
|
|
&& ( dstOffset == rhs.dstOffset )
|
|
&& ( size == rhs.size );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BufferCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCopy2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCopy2 ) == sizeof( VkBufferCopy2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCopy2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCopy2>::value, "BufferCopy2 is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBufferCopy2>
|
|
{
|
|
using Type = BufferCopy2;
|
|
};
|
|
using BufferCopy2KHR = BufferCopy2;
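// Note: a minimal usage sketch (non-normative, hand-written), not part of the generated wrapper.
// BufferCopy2 is the extensible region type consumed through CopyBufferInfo2 and
// CommandBuffer::copyBuffer2 (Vulkan 1.3, or the KHR aliases with VK_KHR_copy_commands2).
// A recording command buffer `cmd` and buffers `src`, `dst` are assumed:
//
//   VULKAN_HPP_NAMESPACE::BufferCopy2 region( 0 /*srcOffset*/, 0 /*dstOffset*/, 256 /*size*/ );
//   VULKAN_HPP_NAMESPACE::CopyBufferInfo2 copyInfo( src, dst, region );
//   cmd.copyBuffer2( copyInfo );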
struct BufferDeviceAddressCreateInfoEXT
|
|
{
|
|
using NativeType = VkBufferDeviceAddressCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: deviceAddress( deviceAddress_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferDeviceAddressCreateInfoEXT( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BufferDeviceAddressCreateInfoEXT( *reinterpret_cast<BufferDeviceAddressCreateInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BufferDeviceAddressCreateInfoEXT & operator=( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferDeviceAddressCreateInfoEXT & operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressCreateInfoEXT & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceAddress = deviceAddress_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBufferDeviceAddressCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBufferDeviceAddressCreateInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkBufferDeviceAddressCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBufferDeviceAddressCreateInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, deviceAddress );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BufferDeviceAddressCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( BufferDeviceAddressCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( deviceAddress == rhs.deviceAddress );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BufferDeviceAddressCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressCreateInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT ) == sizeof( VkBufferDeviceAddressCreateInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT>::value, "BufferDeviceAddressCreateInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBufferDeviceAddressCreateInfoEXT>
|
|
{
|
|
using Type = BufferDeviceAddressCreateInfoEXT;
|
|
};
struct BufferDeviceAddressInfo
|
|
{
|
|
using NativeType = VkBufferDeviceAddressInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: buffer( buffer_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferDeviceAddressInfo( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BufferDeviceAddressInfo( *reinterpret_cast<BufferDeviceAddressInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BufferDeviceAddressInfo & operator=( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferDeviceAddressInfo & operator=( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
buffer = buffer_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBufferDeviceAddressInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBufferDeviceAddressInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkBufferDeviceAddressInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBufferDeviceAddressInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, buffer );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BufferDeviceAddressInfo const & ) const = default;
|
|
#else
|
|
bool operator==( BufferDeviceAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( buffer == rhs.buffer );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BufferDeviceAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo ) == sizeof( VkBufferDeviceAddressInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo>::value, "BufferDeviceAddressInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBufferDeviceAddressInfo>
|
|
{
|
|
using Type = BufferDeviceAddressInfo;
|
|
};
|
|
using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo;
|
|
using BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo;
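// Note: a minimal usage sketch (non-normative, hand-written), not part of the generated wrapper.
// It assumes a valid VULKAN_HPP_NAMESPACE::Device `device` and a buffer created with
// BufferUsageFlagBits::eShaderDeviceAddress:
//
//   VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo addressInfo( buffer );
//   VULKAN_HPP_NAMESPACE::DeviceAddress address = device.getBufferAddress( addressInfo );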
struct BufferImageCopy
|
|
{
|
|
using NativeType = VkBufferImageCopy;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BufferImageCopy(VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, uint32_t bufferRowLength_ = {}, uint32_t bufferImageHeight_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: bufferOffset( bufferOffset_ ), bufferRowLength( bufferRowLength_ ), bufferImageHeight( bufferImageHeight_ ), imageSubresource( imageSubresource_ ), imageOffset( imageOffset_ ), imageExtent( imageExtent_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BufferImageCopy( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferImageCopy( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BufferImageCopy( *reinterpret_cast<BufferImageCopy const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BufferImageCopy & operator=( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferImageCopy & operator=( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferImageCopy const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferOffset = bufferOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferRowLength = bufferRowLength_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferImageHeight = bufferImageHeight_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageSubresource = imageSubresource_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageOffset = imageOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageExtent = imageExtent_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBufferImageCopy const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBufferImageCopy*>( this );
|
|
}
|
|
|
|
explicit operator VkBufferImageCopy &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBufferImageCopy*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( bufferOffset, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BufferImageCopy const & ) const = default;
|
|
#else
|
|
bool operator==( BufferImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( bufferOffset == rhs.bufferOffset )
|
|
&& ( bufferRowLength == rhs.bufferRowLength )
|
|
&& ( bufferImageHeight == rhs.bufferImageHeight )
|
|
&& ( imageSubresource == rhs.imageSubresource )
|
|
&& ( imageOffset == rhs.imageOffset )
|
|
&& ( imageExtent == rhs.imageExtent );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BufferImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {};
|
|
uint32_t bufferRowLength = {};
|
|
uint32_t bufferImageHeight = {};
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {};
|
|
VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {};
|
|
VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferImageCopy>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferImageCopy>::value, "BufferImageCopy is not nothrow_move_constructible!" );
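// Note: a minimal usage sketch (non-normative, hand-written), not part of the generated wrapper.
// It assumes a recording command buffer `cmd`, a source buffer `staging`, and a 2D image `image`
// already transitioned to ImageLayout::eTransferDstOptimal; the extent is an arbitrary placeholder:
//
//   VULKAN_HPP_NAMESPACE::BufferImageCopy region;
//   region.setImageSubresource( { VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, 0 /*mipLevel*/, 0 /*baseArrayLayer*/, 1 /*layerCount*/ } )
//         .setImageExtent( { 256, 256, 1 } );
//   cmd.copyBufferToImage( staging, image, VULKAN_HPP_NAMESPACE::ImageLayout::eTransferDstOptimal, region );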
struct BufferImageCopy2
|
|
{
|
|
using NativeType = VkBufferImageCopy2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferImageCopy2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BufferImageCopy2(VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, uint32_t bufferRowLength_ = {}, uint32_t bufferImageHeight_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: bufferOffset( bufferOffset_ ), bufferRowLength( bufferRowLength_ ), bufferImageHeight( bufferImageHeight_ ), imageSubresource( imageSubresource_ ), imageOffset( imageOffset_ ), imageExtent( imageExtent_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BufferImageCopy2( BufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferImageCopy2( VkBufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BufferImageCopy2( *reinterpret_cast<BufferImageCopy2 const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BufferImageCopy2 & operator=( BufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferImageCopy2 & operator=( VkBufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferImageCopy2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferOffset = bufferOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferRowLength = bufferRowLength_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferImageHeight = bufferImageHeight_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageSubresource = imageSubresource_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageOffset = imageOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageExtent = imageExtent_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBufferImageCopy2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBufferImageCopy2*>( this );
|
|
}
|
|
|
|
explicit operator VkBufferImageCopy2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBufferImageCopy2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, bufferOffset, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BufferImageCopy2 const & ) const = default;
|
|
#else
|
|
bool operator==( BufferImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( bufferOffset == rhs.bufferOffset )
|
|
&& ( bufferRowLength == rhs.bufferRowLength )
|
|
&& ( bufferImageHeight == rhs.bufferImageHeight )
|
|
&& ( imageSubresource == rhs.imageSubresource )
|
|
&& ( imageOffset == rhs.imageOffset )
|
|
&& ( imageExtent == rhs.imageExtent );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BufferImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferImageCopy2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {};
|
|
uint32_t bufferRowLength = {};
|
|
uint32_t bufferImageHeight = {};
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {};
|
|
VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {};
|
|
VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferImageCopy2 ) == sizeof( VkBufferImageCopy2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferImageCopy2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferImageCopy2>::value, "BufferImageCopy2 is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBufferImageCopy2>
|
|
{
|
|
using Type = BufferImageCopy2;
|
|
};
|
|
using BufferImageCopy2KHR = BufferImageCopy2;
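// Note: a minimal usage sketch (non-normative, hand-written), not part of the generated wrapper.
// BufferImageCopy2 is the extensible region type consumed through CopyBufferToImageInfo2 and
// CommandBuffer::copyBufferToImage2 (Vulkan 1.3, or the KHR aliases). The same `cmd`, `staging`
// and `image` placeholders as in the BufferImageCopy note above are assumed:
//
//   VULKAN_HPP_NAMESPACE::BufferImageCopy2 region;
//   region.setImageSubresource( { VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, 0, 0, 1 } )
//         .setImageExtent( { 256, 256, 1 } );
//   VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 copyInfo( staging, image, VULKAN_HPP_NAMESPACE::ImageLayout::eTransferDstOptimal, region );
//   cmd.copyBufferToImage2( copyInfo );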
struct BufferMemoryBarrier
|
|
{
|
|
using NativeType = VkBufferMemoryBarrier;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BufferMemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), srcQueueFamilyIndex( srcQueueFamilyIndex_ ), dstQueueFamilyIndex( dstQueueFamilyIndex_ ), buffer( buffer_ ), offset( offset_ ), size( size_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BufferMemoryBarrier( *reinterpret_cast<BufferMemoryBarrier const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BufferMemoryBarrier & operator=( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferMemoryBarrier & operator=( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcAccessMask = srcAccessMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstAccessMask = dstAccessMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcQueueFamilyIndex = srcQueueFamilyIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstQueueFamilyIndex = dstQueueFamilyIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
buffer = buffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
offset = offset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
size = size_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBufferMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBufferMemoryBarrier*>( this );
|
|
}
|
|
|
|
explicit operator VkBufferMemoryBarrier &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBufferMemoryBarrier*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcAccessMask, dstAccessMask, srcQueueFamilyIndex, dstQueueFamilyIndex, buffer, offset, size );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BufferMemoryBarrier const & ) const = default;
|
|
#else
|
|
bool operator==( BufferMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( srcAccessMask == rhs.srcAccessMask )
|
|
&& ( dstAccessMask == rhs.dstAccessMask )
|
|
&& ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
|
|
&& ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
|
|
&& ( buffer == rhs.buffer )
|
|
&& ( offset == rhs.offset )
|
|
&& ( size == rhs.size );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BufferMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
|
|
VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
|
|
uint32_t srcQueueFamilyIndex = {};
|
|
uint32_t dstQueueFamilyIndex = {};
|
|
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier>::value, "BufferMemoryBarrier is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBufferMemoryBarrier>
|
|
{
|
|
using Type = BufferMemoryBarrier;
|
|
};
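// Note: a minimal usage sketch (non-normative, hand-written), not part of the generated wrapper.
// It assumes a recording command buffer `cmd` and a buffer `buffer` written by a transfer and then
// read in a compute shader:
//
//   VULKAN_HPP_NAMESPACE::BufferMemoryBarrier barrier( VULKAN_HPP_NAMESPACE::AccessFlagBits::eTransferWrite,
//                                                      VULKAN_HPP_NAMESPACE::AccessFlagBits::eShaderRead,
//                                                      VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED,
//                                                      buffer, 0 /*offset*/, VK_WHOLE_SIZE );
//   cmd.pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTransfer,
//                        VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eComputeShader,
//                        {}, nullptr, barrier, nullptr );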
struct BufferMemoryBarrier2
|
|
{
|
|
using NativeType = VkBufferMemoryBarrier2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {}, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: srcStageMask( srcStageMask_ ), srcAccessMask( srcAccessMask_ ), dstStageMask( dstStageMask_ ), dstAccessMask( dstAccessMask_ ), srcQueueFamilyIndex( srcQueueFamilyIndex_ ), dstQueueFamilyIndex( dstQueueFamilyIndex_ ), buffer( buffer_ ), offset( offset_ ), size( size_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2( BufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferMemoryBarrier2( VkBufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BufferMemoryBarrier2( *reinterpret_cast<BufferMemoryBarrier2 const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BufferMemoryBarrier2 & operator=( BufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferMemoryBarrier2 & operator=( VkBufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcStageMask = srcStageMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcAccessMask = srcAccessMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstStageMask = dstStageMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstAccessMask = dstAccessMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcQueueFamilyIndex = srcQueueFamilyIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstQueueFamilyIndex = dstQueueFamilyIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
buffer = buffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
offset = offset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
size = size_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBufferMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBufferMemoryBarrier2*>( this );
|
|
}
|
|
|
|
explicit operator VkBufferMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBufferMemoryBarrier2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, VULKAN_HPP_NAMESPACE::AccessFlags2 const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, VULKAN_HPP_NAMESPACE::AccessFlags2 const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask, srcQueueFamilyIndex, dstQueueFamilyIndex, buffer, offset, size );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BufferMemoryBarrier2 const & ) const = default;
|
|
#else
|
|
bool operator==( BufferMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( srcStageMask == rhs.srcStageMask )
|
|
&& ( srcAccessMask == rhs.srcAccessMask )
|
|
&& ( dstStageMask == rhs.dstStageMask )
|
|
&& ( dstAccessMask == rhs.dstAccessMask )
|
|
&& ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
|
|
&& ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
|
|
&& ( buffer == rhs.buffer )
|
|
&& ( offset == rhs.offset )
|
|
&& ( size == rhs.size );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BufferMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {};
|
|
VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {};
|
|
VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {};
|
|
uint32_t srcQueueFamilyIndex = {};
|
|
uint32_t dstQueueFamilyIndex = {};
|
|
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 ) == sizeof( VkBufferMemoryBarrier2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2>::value, "BufferMemoryBarrier2 is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBufferMemoryBarrier2>
|
|
{
|
|
using Type = BufferMemoryBarrier2;
|
|
};
|
|
using BufferMemoryBarrier2KHR = BufferMemoryBarrier2;
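// Note: a minimal usage sketch (non-normative, hand-written), not part of the generated wrapper.
// BufferMemoryBarrier2 carries its own stage masks and is submitted through a DependencyInfo
// (Vulkan 1.3 synchronization2, or the KHR aliases). A recording command buffer `cmd` and a buffer
// `buffer` are assumed:
//
//   VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 barrier( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits2::eCopy,
//                                                       VULKAN_HPP_NAMESPACE::AccessFlagBits2::eTransferWrite,
//                                                       VULKAN_HPP_NAMESPACE::PipelineStageFlagBits2::eComputeShader,
//                                                       VULKAN_HPP_NAMESPACE::AccessFlagBits2::eShaderRead,
//                                                       VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED,
//                                                       buffer, 0 /*offset*/, VK_WHOLE_SIZE );
//   VULKAN_HPP_NAMESPACE::DependencyInfo dependencyInfo;
//   dependencyInfo.setBufferMemoryBarriers( barrier );
//   cmd.pipelineBarrier2( dependencyInfo );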
struct BufferMemoryRequirementsInfo2
|
|
{
|
|
using NativeType = VkBufferMemoryRequirementsInfo2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryRequirementsInfo2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: buffer( buffer_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BufferMemoryRequirementsInfo2( *reinterpret_cast<BufferMemoryRequirementsInfo2 const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BufferMemoryRequirementsInfo2 & operator=( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferMemoryRequirementsInfo2 & operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
buffer = buffer_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBufferMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( this );
|
|
}
|
|
|
|
explicit operator VkBufferMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBufferMemoryRequirementsInfo2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, buffer );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BufferMemoryRequirementsInfo2 const & ) const = default;
|
|
#else
|
|
bool operator==( BufferMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( buffer == rhs.buffer );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BufferMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryRequirementsInfo2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 ) == sizeof( VkBufferMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2>::value, "BufferMemoryRequirementsInfo2 is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBufferMemoryRequirementsInfo2>
|
|
{
|
|
using Type = BufferMemoryRequirementsInfo2;
|
|
};
|
|
using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2;
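  // Usage sketch (hand-written illustration, not generated from the registry):
  // querying memory requirements for an existing buffer through the *2 entry
  // point.  `device` and `buffer` are assumed to be valid handles owned by the
  // application; the default `vk` namespace is assumed.
  //
  //   vk::BufferMemoryRequirementsInfo2 info( buffer );
  //   vk::MemoryRequirements2 requirements = device.getBufferMemoryRequirements2( info );
  //   vk::DeviceSize size = requirements.memoryRequirements.size;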
|
|
|
|
struct BufferOpaqueCaptureAddressCreateInfo
|
|
{
|
|
using NativeType = VkBufferOpaqueCaptureAddressCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferOpaqueCaptureAddressCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo(uint64_t opaqueCaptureAddress_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: opaqueCaptureAddress( opaqueCaptureAddress_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferOpaqueCaptureAddressCreateInfo( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BufferOpaqueCaptureAddressCreateInfo( *reinterpret_cast<BufferOpaqueCaptureAddressCreateInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BufferOpaqueCaptureAddressCreateInfo & operator=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferOpaqueCaptureAddressCreateInfo & operator=( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
opaqueCaptureAddress = opaqueCaptureAddress_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBufferOpaqueCaptureAddressCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBufferOpaqueCaptureAddressCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkBufferOpaqueCaptureAddressCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBufferOpaqueCaptureAddressCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, opaqueCaptureAddress );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BufferOpaqueCaptureAddressCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( BufferOpaqueCaptureAddressCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferOpaqueCaptureAddressCreateInfo;
|
|
const void * pNext = {};
|
|
uint64_t opaqueCaptureAddress = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo ) == sizeof( VkBufferOpaqueCaptureAddressCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo>::value, "BufferOpaqueCaptureAddressCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBufferOpaqueCaptureAddressCreateInfo>
|
|
{
|
|
using Type = BufferOpaqueCaptureAddressCreateInfo;
|
|
};
|
|
using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo;
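  // Usage sketch (hand-written illustration, not generated from the registry):
  // re-creating a buffer at a previously captured opaque address for
  // capture/replay tooling.  The struct is chained into BufferCreateInfo::pNext;
  // `capturedAddress` is assumed to come from an earlier opaque-capture-address
  // query, and the default `vk` namespace is assumed.
  //
  //   vk::BufferOpaqueCaptureAddressCreateInfo captureInfo( capturedAddress );
  //   vk::BufferCreateInfo createInfo{};
  //   createInfo.setFlags( vk::BufferCreateFlagBits::eDeviceAddressCaptureReplay )
  //             .setSize( 65536 )
  //             .setUsage( vk::BufferUsageFlagBits::eStorageBuffer | vk::BufferUsageFlagBits::eShaderDeviceAddress )
  //             .setPNext( &captureInfo );
  //   vk::Buffer buffer = device.createBuffer( createInfo );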
|
|
|
|
struct BufferViewCreateInfo
|
|
{
|
|
using NativeType = VkBufferViewCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferViewCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR BufferViewCreateInfo(VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize range_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), buffer( buffer_ ), format( format_ ), offset( offset_ ), range( range_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: BufferViewCreateInfo( *reinterpret_cast<BufferViewCreateInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
BufferViewCreateInfo & operator=( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
BufferViewCreateInfo & operator=( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
buffer = buffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
format = format_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
offset = offset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
range = range_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkBufferViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkBufferViewCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkBufferViewCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkBufferViewCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferViewCreateFlags const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, buffer, format, offset, range );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( BufferViewCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( BufferViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( buffer == rhs.buffer )
|
|
&& ( format == rhs.format )
|
|
&& ( offset == rhs.offset )
|
|
&& ( range == rhs.range );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( BufferViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferViewCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
|
|
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize range = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo>::value, "BufferViewCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eBufferViewCreateInfo>
|
|
{
|
|
using Type = BufferViewCreateInfo;
|
|
};
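  // Usage sketch (hand-written illustration, not generated from the registry):
  // creating a buffer view over a uniform texel buffer.  `device` and `buffer`
  // are assumed to be valid handles; the default `vk` namespace is assumed.
  //
  //   vk::BufferViewCreateInfo viewInfo{};
  //   viewInfo.setBuffer( buffer )
  //           .setFormat( vk::Format::eR32G32B32A32Sfloat )
  //           .setOffset( 0 )
  //           .setRange( VK_WHOLE_SIZE );
  //   vk::BufferView view = device.createBufferView( viewInfo );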
|
|
|
|
struct CalibratedTimestampInfoEXT
|
|
{
|
|
using NativeType = VkCalibratedTimestampInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCalibratedTimestampInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT(VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice) VULKAN_HPP_NOEXCEPT
|
|
: timeDomain( timeDomain_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CalibratedTimestampInfoEXT( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CalibratedTimestampInfoEXT( *reinterpret_cast<CalibratedTimestampInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CalibratedTimestampInfoEXT & operator=( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CalibratedTimestampInfoEXT & operator=( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoEXT & setTimeDomain( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
timeDomain = timeDomain_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkCalibratedTimestampInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCalibratedTimestampInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkCalibratedTimestampInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCalibratedTimestampInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::TimeDomainEXT const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, timeDomain );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( CalibratedTimestampInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( CalibratedTimestampInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( timeDomain == rhs.timeDomain );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( CalibratedTimestampInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCalibratedTimestampInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT ) == sizeof( VkCalibratedTimestampInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT>::value, "CalibratedTimestampInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCalibratedTimestampInfoEXT>
|
|
{
|
|
using Type = CalibratedTimestampInfoEXT;
|
|
};
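  // Usage sketch (hand-written illustration, not generated from the registry):
  // requesting calibrated timestamps for the device and host clock domains
  // (VK_EXT_calibrated_timestamps).  `device` is assumed to be a valid handle
  // created with the extension enabled; the exact enhanced-mode return type is
  // an assumption here.  The default `vk` namespace is assumed.
  //
  //   std::array<vk::CalibratedTimestampInfoEXT, 2> infos = {
  //     vk::CalibratedTimestampInfoEXT( vk::TimeDomainEXT::eDevice ),
  //     vk::CalibratedTimestampInfoEXT( vk::TimeDomainEXT::eClockMonotonic )
  //   };
  //   auto [ timestamps, maxDeviation ] = device.getCalibratedTimestampsEXT( infos );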
|
|
|
|
struct CheckpointData2NV
|
|
{
|
|
using NativeType = VkCheckpointData2NV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointData2NV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CheckpointData2NV(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage_ = {}, void * pCheckpointMarker_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: stage( stage_ ), pCheckpointMarker( pCheckpointMarker_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR CheckpointData2NV( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CheckpointData2NV( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CheckpointData2NV( *reinterpret_cast<CheckpointData2NV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CheckpointData2NV & operator=( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CheckpointData2NV & operator=( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CheckpointData2NV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkCheckpointData2NV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCheckpointData2NV*>( this );
|
|
}
|
|
|
|
explicit operator VkCheckpointData2NV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCheckpointData2NV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, void * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, stage, pCheckpointMarker );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( CheckpointData2NV const & ) const = default;
|
|
#else
|
|
bool operator==( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( stage == rhs.stage )
|
|
&& ( pCheckpointMarker == rhs.pCheckpointMarker );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointData2NV;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage = {};
|
|
void * pCheckpointMarker = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CheckpointData2NV ) == sizeof( VkCheckpointData2NV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CheckpointData2NV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CheckpointData2NV>::value, "CheckpointData2NV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCheckpointData2NV>
|
|
{
|
|
using Type = CheckpointData2NV;
|
|
};
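  // Usage sketch (hand-written illustration, not generated from the registry):
  // after a device-lost error, reading the checkpoints the queue reached
  // (VK_NV_device_diagnostic_checkpoints with synchronization2 stages).
  // `queue` is assumed to be a valid vk::Queue; the default `vk` namespace is assumed.
  //
  //   std::vector<vk::CheckpointData2NV> checkpoints = queue.getCheckpointData2NV();
  //   for ( auto const & cp : checkpoints )
  //   {
  //     // cp.stage holds the PipelineStageFlags2 value, cp.pCheckpointMarker the
  //     // marker previously passed to vkCmdSetCheckpointNV.
  //   }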
|
|
|
|
struct CheckpointDataNV
|
|
{
|
|
using NativeType = VkCheckpointDataNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointDataNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CheckpointDataNV(VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe, void * pCheckpointMarker_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: stage( stage_ ), pCheckpointMarker( pCheckpointMarker_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR CheckpointDataNV( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CheckpointDataNV( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CheckpointDataNV( *reinterpret_cast<CheckpointDataNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CheckpointDataNV & operator=( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CheckpointDataNV & operator=( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CheckpointDataNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkCheckpointDataNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCheckpointDataNV*>( this );
|
|
}
|
|
|
|
explicit operator VkCheckpointDataNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCheckpointDataNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlagBits const &, void * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, stage, pCheckpointMarker );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( CheckpointDataNV const & ) const = default;
|
|
#else
|
|
bool operator==( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( stage == rhs.stage )
|
|
&& ( pCheckpointMarker == rhs.pCheckpointMarker );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointDataNV;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe;
|
|
void * pCheckpointMarker = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CheckpointDataNV ) == sizeof( VkCheckpointDataNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CheckpointDataNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CheckpointDataNV>::value, "CheckpointDataNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCheckpointDataNV>
|
|
{
|
|
using Type = CheckpointDataNV;
|
|
};
|
|
|
|
union ClearColorValue
|
|
{
|
|
using NativeType = VkClearColorValue;
|
|
#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array<float,4>& float32_ = {} )
|
|
: float32( float32_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array<int32_t,4>& int32_ )
|
|
: int32( int32_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array<uint32_t,4>& uint32_ )
|
|
: uint32( uint32_ )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
|
|
|
|
#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setFloat32( std::array<float,4> float32_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
float32 = float32_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setInt32( std::array<int32_t,4> int32_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
int32 = int32_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setUint32( std::array<uint32_t,4> uint32_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
uint32 = uint32_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_UNION_SETTERS*/
|
|
|
|
operator VkClearColorValue const &() const
|
|
{
|
|
return *reinterpret_cast<const VkClearColorValue*>( this );
|
|
}
|
|
|
|
operator VkClearColorValue &()
|
|
{
|
|
return *reinterpret_cast<VkClearColorValue*>( this );
|
|
}
|
|
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> float32;
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int32_t, 4> int32;
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 4> uint32;
|
|
|
|
};
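  // Usage sketch (hand-written illustration, not generated from the registry):
  // the union member that is written must match the image format class: use
  // float32 for UNORM/SFLOAT formats and int32 / uint32 for signed / unsigned
  // integer formats.  The default `vk` namespace is assumed.
  //
  //   vk::ClearColorValue opaqueBlack( std::array<float, 4>{ 0.0f, 0.0f, 0.0f, 1.0f } );
  //   vk::ClearColorValue uintClear( std::array<uint32_t, 4>{ 0u, 0u, 0u, 0u } );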
|
|
|
|
struct ClearDepthStencilValue
|
|
{
|
|
using NativeType = VkClearDepthStencilValue;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ClearDepthStencilValue(float depth_ = {}, uint32_t stencil_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: depth( depth_ ), stencil( stencil_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ClearDepthStencilValue( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ClearDepthStencilValue( *reinterpret_cast<ClearDepthStencilValue const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ClearDepthStencilValue & operator=( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ClearDepthStencilValue & operator=( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue & setDepth( float depth_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
depth = depth_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue & setStencil( uint32_t stencil_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stencil = stencil_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkClearDepthStencilValue const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkClearDepthStencilValue*>( this );
|
|
}
|
|
|
|
explicit operator VkClearDepthStencilValue &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkClearDepthStencilValue*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<float const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( depth, stencil );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ClearDepthStencilValue const & ) const = default;
|
|
#else
|
|
bool operator==( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( depth == rhs.depth )
|
|
&& ( stencil == rhs.stencil );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
float depth = {};
|
|
uint32_t stencil = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue>::value, "ClearDepthStencilValue is not nothrow_move_constructible!" );
|
|
|
|
  union ClearValue
  {
    using NativeType = VkClearValue;
#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )

    VULKAN_HPP_CONSTEXPR_14 ClearValue( VULKAN_HPP_NAMESPACE::ClearColorValue color_ = {} )
      : color( color_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 ClearValue( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ )
      : depthStencil( depthStencil_ )
    {}
#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/

#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 ClearValue & setColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & color_ ) VULKAN_HPP_NOEXCEPT
    {
      color = color_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ClearValue & setDepthStencil( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const & depthStencil_ ) VULKAN_HPP_NOEXCEPT
    {
      depthStencil = depthStencil_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_UNION_SETTERS*/

    operator VkClearValue const &() const
    {
      return *reinterpret_cast<const VkClearValue*>( this );
    }

    operator VkClearValue &()
    {
      return *reinterpret_cast<VkClearValue*>( this );
    }

#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
    VULKAN_HPP_NAMESPACE::ClearColorValue color;
    VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil;
#else
    VkClearColorValue color;
    VkClearDepthStencilValue depthStencil;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  };
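  // Usage sketch (hand-written illustration, not generated from the registry):
  // one ClearValue per render-pass attachment, indexed like the attachments
  // (here a colour attachment at index 0 and a depth attachment at index 1),
  // handed to RenderPassBeginInfo.  `renderPass`, `framebuffer` and `extent`
  // are assumed to exist in application code; the default `vk` namespace is assumed.
  //
  //   std::array<vk::ClearValue, 2> clearValues{};
  //   clearValues[0].setColor( vk::ClearColorValue( std::array<float, 4>{ 0.0f, 0.0f, 0.0f, 1.0f } ) );
  //   clearValues[1].setDepthStencil( vk::ClearDepthStencilValue( 1.0f, 0 ) );
  //   vk::RenderPassBeginInfo beginInfo{};
  //   beginInfo.setRenderPass( renderPass )
  //            .setFramebuffer( framebuffer )
  //            .setRenderArea( vk::Rect2D( { 0, 0 }, extent ) )
  //            .setClearValues( clearValues );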
|
|
|
|
struct ClearAttachment
|
|
{
|
|
using NativeType = VkClearAttachment;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 ClearAttachment(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t colorAttachment_ = {}, VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: aspectMask( aspectMask_ ), colorAttachment( colorAttachment_ ), clearValue( clearValue_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ClearAttachment( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ClearAttachment( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ClearAttachment( *reinterpret_cast<ClearAttachment const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ClearAttachment & operator=( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ClearAttachment & operator=( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearAttachment const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
aspectMask = aspectMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setColorAttachment( uint32_t colorAttachment_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorAttachment = colorAttachment_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
clearValue = clearValue_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkClearAttachment const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkClearAttachment*>( this );
|
|
}
|
|
|
|
explicit operator VkClearAttachment &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkClearAttachment*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ClearValue const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( aspectMask, colorAttachment, clearValue );
|
|
}
|
|
#endif
|
|
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
|
|
uint32_t colorAttachment = {};
|
|
VULKAN_HPP_NAMESPACE::ClearValue clearValue = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearAttachment ) == sizeof( VkClearAttachment ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ClearAttachment>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ClearAttachment>::value, "ClearAttachment is not nothrow_move_constructible!" );
|
|
|
|
struct ClearRect
|
|
{
|
|
using NativeType = VkClearRect;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ClearRect(VULKAN_HPP_NAMESPACE::Rect2D rect_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: rect( rect_ ), baseArrayLayer( baseArrayLayer_ ), layerCount( layerCount_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ClearRect( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ClearRect( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ClearRect( *reinterpret_cast<ClearRect const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ClearRect & operator=( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ClearRect & operator=( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearRect const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ClearRect & setRect( VULKAN_HPP_NAMESPACE::Rect2D const & rect_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
rect = rect_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ClearRect & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
baseArrayLayer = baseArrayLayer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ClearRect & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
layerCount = layerCount_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkClearRect const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkClearRect*>( this );
|
|
}
|
|
|
|
explicit operator VkClearRect &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkClearRect*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::Rect2D const &, uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( rect, baseArrayLayer, layerCount );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ClearRect const & ) const = default;
|
|
#else
|
|
bool operator==( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( rect == rhs.rect )
|
|
&& ( baseArrayLayer == rhs.baseArrayLayer )
|
|
&& ( layerCount == rhs.layerCount );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::Rect2D rect = {};
    uint32_t baseArrayLayer = {};
    uint32_t layerCount = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearRect ) == sizeof( VkClearRect ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ClearRect>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ClearRect>::value, "ClearRect is not nothrow_move_constructible!" );
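  // Usage sketch (hand-written illustration, not generated from the registry):
  // clearing the first colour attachment of the current subpass from inside a
  // render pass, combining ClearAttachment and ClearRect.  `cmd` is assumed to
  // be a command buffer currently inside a render pass and `extent` the
  // framebuffer extent; the default `vk` namespace is assumed.
  //
  //   vk::ClearAttachment attachment( vk::ImageAspectFlagBits::eColor, 0,
  //                                   vk::ClearValue( vk::ClearColorValue( std::array<float, 4>{ 0.0f, 0.0f, 0.0f, 1.0f } ) ) );
  //   vk::ClearRect rect( vk::Rect2D( { 0, 0 }, extent ), 0, 1 );
  //   cmd.clearAttachments( attachment, rect );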
|
|
|
|
struct CoarseSampleLocationNV
|
|
{
|
|
using NativeType = VkCoarseSampleLocationNV;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV(uint32_t pixelX_ = {}, uint32_t pixelY_ = {}, uint32_t sample_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: pixelX( pixelX_ ), pixelY( pixelY_ ), sample( sample_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CoarseSampleLocationNV( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CoarseSampleLocationNV( *reinterpret_cast<CoarseSampleLocationNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CoarseSampleLocationNV & operator=( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CoarseSampleLocationNV & operator=( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setPixelX( uint32_t pixelX_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pixelX = pixelX_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setPixelY( uint32_t pixelY_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pixelY = pixelY_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setSample( uint32_t sample_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sample = sample_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkCoarseSampleLocationNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCoarseSampleLocationNV*>( this );
|
|
}
|
|
|
|
explicit operator VkCoarseSampleLocationNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCoarseSampleLocationNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( pixelX, pixelY, sample );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( CoarseSampleLocationNV const & ) const = default;
|
|
#else
|
|
bool operator==( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( pixelX == rhs.pixelX )
|
|
&& ( pixelY == rhs.pixelY )
|
|
&& ( sample == rhs.sample );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t pixelX = {};
|
|
uint32_t pixelY = {};
|
|
uint32_t sample = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV ) == sizeof( VkCoarseSampleLocationNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV>::value, "CoarseSampleLocationNV is not nothrow_move_constructible!" );
|
|
|
|
struct CoarseSampleOrderCustomNV
|
|
{
|
|
using NativeType = VkCoarseSampleOrderCustomNV;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV(VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations, uint32_t sampleCount_ = {}, uint32_t sampleLocationCount_ = {}, const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: shadingRate( shadingRate_ ), sampleCount( sampleCount_ ), sampleLocationCount( sampleLocationCount_ ), pSampleLocations( pSampleLocations_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CoarseSampleOrderCustomNV( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CoarseSampleOrderCustomNV( *reinterpret_cast<CoarseSampleOrderCustomNV const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
CoarseSampleOrderCustomNV( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_, uint32_t sampleCount_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV> const & sampleLocations_ )
|
|
: shadingRate( shadingRate_ ), sampleCount( sampleCount_ ), sampleLocationCount( static_cast<uint32_t>( sampleLocations_.size() ) ), pSampleLocations( sampleLocations_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CoarseSampleOrderCustomNV & operator=( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CoarseSampleOrderCustomNV & operator=( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setShadingRate( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shadingRate = shadingRate_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setSampleCount( uint32_t sampleCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sampleCount = sampleCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setSampleLocationCount( uint32_t sampleLocationCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sampleLocationCount = sampleLocationCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setPSampleLocations( const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSampleLocations = pSampleLocations_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
CoarseSampleOrderCustomNV & setSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV> const & sampleLocations_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sampleLocationCount = static_cast<uint32_t>( sampleLocations_.size() );
|
|
pSampleLocations = sampleLocations_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkCoarseSampleOrderCustomNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCoarseSampleOrderCustomNV*>( this );
|
|
}
|
|
|
|
explicit operator VkCoarseSampleOrderCustomNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCoarseSampleOrderCustomNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( shadingRate, sampleCount, sampleLocationCount, pSampleLocations );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( CoarseSampleOrderCustomNV const & ) const = default;
|
|
#else
|
|
bool operator==( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( shadingRate == rhs.shadingRate )
|
|
&& ( sampleCount == rhs.sampleCount )
|
|
&& ( sampleLocationCount == rhs.sampleLocationCount )
|
|
&& ( pSampleLocations == rhs.pSampleLocations );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations;
|
|
uint32_t sampleCount = {};
|
|
uint32_t sampleLocationCount = {};
|
|
const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV ) == sizeof( VkCoarseSampleOrderCustomNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV>::value, "CoarseSampleOrderCustomNV is not nothrow_move_constructible!" );
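  // Usage sketch (hand-written illustration, not generated from the registry):
  // the enhanced-mode constructor above derives sampleLocationCount and
  // pSampleLocations from an ArrayProxy, so a std::vector can be passed
  // directly.  The palette entry and counts below are arbitrary example values;
  // the default `vk` namespace is assumed.
  //
  //   std::vector<vk::CoarseSampleLocationNV> locations = {
  //     vk::CoarseSampleLocationNV( 0, 0, 0 ),
  //     vk::CoarseSampleLocationNV( 1, 0, 0 )
  //   };
  //   vk::CoarseSampleOrderCustomNV order( vk::ShadingRatePaletteEntryNV::e1InvocationPer2X2Pixels, 1, locations );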
|
|
|
|
struct CommandBufferAllocateInfo
|
|
{
|
|
using NativeType = VkCommandBufferAllocateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferAllocateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo(VULKAN_HPP_NAMESPACE::CommandPool commandPool_ = {}, VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary, uint32_t commandBufferCount_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: commandPool( commandPool_ ), level( level_ ), commandBufferCount( commandBufferCount_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CommandBufferAllocateInfo( *reinterpret_cast<CommandBufferAllocateInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CommandBufferAllocateInfo & operator=( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CommandBufferAllocateInfo & operator=( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
commandPool = commandPool_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setLevel( VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
level = level_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
commandBufferCount = commandBufferCount_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkCommandBufferAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCommandBufferAllocateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkCommandBufferAllocateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCommandBufferAllocateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CommandPool const &, VULKAN_HPP_NAMESPACE::CommandBufferLevel const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, commandPool, level, commandBufferCount );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( CommandBufferAllocateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( commandPool == rhs.commandPool )
|
|
&& ( level == rhs.level )
|
|
&& ( commandBufferCount == rhs.commandBufferCount );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferAllocateInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::CommandPool commandPool = {};
    VULKAN_HPP_NAMESPACE::CommandBufferLevel level = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary;
    uint32_t commandBufferCount = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo>::value, "CommandBufferAllocateInfo is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eCommandBufferAllocateInfo>
  {
    using Type = CommandBufferAllocateInfo;
  };
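  // Usage sketch (hand-written illustration, not generated from the registry):
  // allocating primary command buffers from an existing pool.  `device` and
  // `commandPool` are assumed to be valid handles; the default `vk` namespace is assumed.
  //
  //   vk::CommandBufferAllocateInfo allocInfo( commandPool, vk::CommandBufferLevel::ePrimary, 3 );
  //   std::vector<vk::CommandBuffer> commandBuffers = device.allocateCommandBuffers( allocInfo );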
|
|
|
|
struct CommandBufferInheritanceInfo
|
|
{
|
|
using NativeType = VkCommandBufferInheritanceInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo(VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ = {}, VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = {}, VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: renderPass( renderPass_ ), subpass( subpass_ ), framebuffer( framebuffer_ ), occlusionQueryEnable( occlusionQueryEnable_ ), queryFlags( queryFlags_ ), pipelineStatistics( pipelineStatistics_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CommandBufferInheritanceInfo( *reinterpret_cast<CommandBufferInheritanceInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CommandBufferInheritanceInfo & operator=( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CommandBufferInheritanceInfo & operator=( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
renderPass = renderPass_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subpass = subpass_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
framebuffer = framebuffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setOcclusionQueryEnable( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
occlusionQueryEnable = occlusionQueryEnable_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setQueryFlags( VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queryFlags = queryFlags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineStatistics = pipelineStatistics_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkCommandBufferInheritanceInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCommandBufferInheritanceInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkCommandBufferInheritanceInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCommandBufferInheritanceInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RenderPass const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Framebuffer const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::QueryControlFlags const &, VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, renderPass, subpass, framebuffer, occlusionQueryEnable, queryFlags, pipelineStatistics );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( CommandBufferInheritanceInfo const & ) const = default;
|
|
#else
|
|
bool operator==( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( renderPass == rhs.renderPass )
|
|
&& ( subpass == rhs.subpass )
|
|
&& ( framebuffer == rhs.framebuffer )
|
|
&& ( occlusionQueryEnable == rhs.occlusionQueryEnable )
|
|
&& ( queryFlags == rhs.queryFlags )
|
|
&& ( pipelineStatistics == rhs.pipelineStatistics );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
|
|
uint32_t subpass = {};
|
|
VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable = {};
|
|
VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags = {};
|
|
VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo>::value, "CommandBufferInheritanceInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCommandBufferInheritanceInfo>
|
|
{
|
|
using Type = CommandBufferInheritanceInfo;
|
|
};
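  // Usage sketch (hand-written illustration, not generated from the registry):
  // inheritance state for a secondary command buffer that will execute inside
  // subpass 0 of `renderPass`.  `renderPass` and `framebuffer` are assumed to
  // exist; the framebuffer may stay VK_NULL_HANDLE if it is not known at record
  // time.  The default `vk` namespace is assumed.
  //
  //   vk::CommandBufferInheritanceInfo inheritance{};
  //   inheritance.setRenderPass( renderPass )
  //              .setSubpass( 0 )
  //              .setFramebuffer( framebuffer );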
|
|
|
|
struct CommandBufferBeginInfo
|
|
{
|
|
using NativeType = VkCommandBufferBeginInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferBeginInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo(VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ = {}, const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), pInheritanceInfo( pInheritanceInfo_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CommandBufferBeginInfo( *reinterpret_cast<CommandBufferBeginInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CommandBufferBeginInfo & operator=( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CommandBufferBeginInfo & operator=( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setPInheritanceInfo( const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pInheritanceInfo = pInheritanceInfo_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCommandBufferBeginInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCommandBufferBeginInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags const &, const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, pInheritanceInfo );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( CommandBufferBeginInfo const & ) const = default;
|
|
#else
|
|
bool operator==( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( pInheritanceInfo == rhs.pInheritanceInfo );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferBeginInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags = {};
|
|
const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo>::value, "CommandBufferBeginInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCommandBufferBeginInfo>
|
|
{
|
|
using Type = CommandBufferBeginInfo;
|
|
};
|
|
|
|
struct CommandBufferInheritanceConditionalRenderingInfoEXT
|
|
{
|
|
using NativeType = VkCommandBufferInheritanceConditionalRenderingInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: conditionalRenderingEnable( conditionalRenderingEnable_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CommandBufferInheritanceConditionalRenderingInfoEXT( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CommandBufferInheritanceConditionalRenderingInfoEXT( *reinterpret_cast<CommandBufferInheritanceConditionalRenderingInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT & setConditionalRenderingEnable( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
conditionalRenderingEnable = conditionalRenderingEnable_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCommandBufferInheritanceConditionalRenderingInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkCommandBufferInheritanceConditionalRenderingInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCommandBufferInheritanceConditionalRenderingInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, conditionalRenderingEnable );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( CommandBufferInheritanceConditionalRenderingInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( conditionalRenderingEnable == rhs.conditionalRenderingEnable );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT ) == sizeof( VkCommandBufferInheritanceConditionalRenderingInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT>::value, "CommandBufferInheritanceConditionalRenderingInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT>
|
|
{
|
|
using Type = CommandBufferInheritanceConditionalRenderingInfoEXT;
|
|
};
|
|
|
|
struct CommandBufferInheritanceRenderPassTransformInfoQCOM
|
|
{
|
|
using NativeType = VkCommandBufferInheritanceRenderPassTransformInfoQCOM;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: transform( transform_ ), renderArea( renderArea_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CommandBufferInheritanceRenderPassTransformInfoQCOM( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CommandBufferInheritanceRenderPassTransformInfoQCOM( *reinterpret_cast<CommandBufferInheritanceRenderPassTransformInfoQCOM const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CommandBufferInheritanceRenderPassTransformInfoQCOM & operator=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CommandBufferInheritanceRenderPassTransformInfoQCOM & operator=( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
transform = transform_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
renderArea = renderArea_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCommandBufferInheritanceRenderPassTransformInfoQCOM*>( this );
|
|
}
|
|
|
|
explicit operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCommandBufferInheritanceRenderPassTransformInfoQCOM*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &, VULKAN_HPP_NAMESPACE::Rect2D const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, transform, renderArea );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( CommandBufferInheritanceRenderPassTransformInfoQCOM const & ) const = default;
|
|
#else
|
|
bool operator==( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( transform == rhs.transform )
|
|
&& ( renderArea == rhs.renderArea );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
|
|
VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM ) == sizeof( VkCommandBufferInheritanceRenderPassTransformInfoQCOM ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM>::value, "CommandBufferInheritanceRenderPassTransformInfoQCOM is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM>
|
|
{
|
|
using Type = CommandBufferInheritanceRenderPassTransformInfoQCOM;
|
|
};
|
|
|
|
struct CommandBufferInheritanceRenderingInfo
|
|
{
|
|
using NativeType = VkCommandBufferInheritanceRenderingInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceRenderingInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderingInfo(VULKAN_HPP_NAMESPACE::RenderingFlags flags_ = {}, uint32_t viewMask_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {}, VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), viewMask( viewMask_ ), colorAttachmentCount( colorAttachmentCount_ ), pColorAttachmentFormats( pColorAttachmentFormats_ ), depthAttachmentFormat( depthAttachmentFormat_ ), stencilAttachmentFormat( stencilAttachmentFormat_ ), rasterizationSamples( rasterizationSamples_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderingInfo( CommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CommandBufferInheritanceRenderingInfo( VkCommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CommandBufferInheritanceRenderingInfo( *reinterpret_cast<CommandBufferInheritanceRenderingInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
CommandBufferInheritanceRenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_, uint32_t viewMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & colorAttachmentFormats_, VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1 )
|
|
: flags( flags_ ), viewMask( viewMask_ ), colorAttachmentCount( static_cast<uint32_t>( colorAttachmentFormats_.size() ) ), pColorAttachmentFormats( colorAttachmentFormats_.data() ), depthAttachmentFormat( depthAttachmentFormat_ ), stencilAttachmentFormat( stencilAttachmentFormat_ ), rasterizationSamples( rasterizationSamples_ )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CommandBufferInheritanceRenderingInfo & operator=( CommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CommandBufferInheritanceRenderingInfo & operator=( VkCommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewMask = viewMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorAttachmentCount = colorAttachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pColorAttachmentFormats = pColorAttachmentFormats_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
CommandBufferInheritanceRenderingInfo & setColorAttachmentFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorAttachmentCount = static_cast<uint32_t>( colorAttachmentFormats_.size() );
|
|
pColorAttachmentFormats = colorAttachmentFormats_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
depthAttachmentFormat = depthAttachmentFormat_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stencilAttachmentFormat = stencilAttachmentFormat_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
rasterizationSamples = rasterizationSamples_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkCommandBufferInheritanceRenderingInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCommandBufferInheritanceRenderingInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkCommandBufferInheritanceRenderingInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCommandBufferInheritanceRenderingInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RenderingFlags const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Format * const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat, rasterizationSamples );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( CommandBufferInheritanceRenderingInfo const & ) const = default;
|
|
#else
|
|
bool operator==( CommandBufferInheritanceRenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( viewMask == rhs.viewMask )
|
|
&& ( colorAttachmentCount == rhs.colorAttachmentCount )
|
|
&& ( pColorAttachmentFormats == rhs.pColorAttachmentFormats )
|
|
&& ( depthAttachmentFormat == rhs.depthAttachmentFormat )
|
|
&& ( stencilAttachmentFormat == rhs.stencilAttachmentFormat )
|
|
&& ( rasterizationSamples == rhs.rasterizationSamples );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( CommandBufferInheritanceRenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderingInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::RenderingFlags flags = {};
|
|
uint32_t viewMask = {};
|
|
uint32_t colorAttachmentCount = {};
|
|
const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats = {};
|
|
VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo ) == sizeof( VkCommandBufferInheritanceRenderingInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo>::value, "CommandBufferInheritanceRenderingInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCommandBufferInheritanceRenderingInfo>
|
|
{
|
|
using Type = CommandBufferInheritanceRenderingInfo;
|
|
};
|
|
using CommandBufferInheritanceRenderingInfoKHR = CommandBufferInheritanceRenderingInfo;
|
|
|
|
struct Viewport
|
|
{
|
|
using NativeType = VkViewport;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR Viewport(float x_ = {}, float y_ = {}, float width_ = {}, float height_ = {}, float minDepth_ = {}, float maxDepth_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: x( x_ ), y( y_ ), width( width_ ), height( height_ ), minDepth( minDepth_ ), maxDepth( maxDepth_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR Viewport( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
Viewport( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: Viewport( *reinterpret_cast<Viewport const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
Viewport & operator=( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
Viewport & operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Viewport const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 Viewport & setX( float x_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
x = x_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Viewport & setY( float y_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
y = y_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Viewport & setWidth( float width_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
width = width_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Viewport & setHeight( float height_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
height = height_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Viewport & setMinDepth( float minDepth_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
minDepth = minDepth_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Viewport & setMaxDepth( float maxDepth_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxDepth = maxDepth_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkViewport const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkViewport*>( this );
|
|
}
|
|
|
|
explicit operator VkViewport &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkViewport*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<float const &, float const &, float const &, float const &, float const &, float const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( x, y, width, height, minDepth, maxDepth );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( Viewport const & ) const = default;
|
|
#else
|
|
bool operator==( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( x == rhs.x )
|
|
&& ( y == rhs.y )
|
|
&& ( width == rhs.width )
|
|
&& ( height == rhs.height )
|
|
&& ( minDepth == rhs.minDepth )
|
|
&& ( maxDepth == rhs.maxDepth );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
float x = {};
|
|
float y = {};
|
|
float width = {};
|
|
float height = {};
|
|
float minDepth = {};
|
|
float maxDepth = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Viewport>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Viewport>::value, "Viewport is not nothrow_move_constructible!" );
|
|
|
|
struct CommandBufferInheritanceViewportScissorInfoNV
|
|
{
|
|
using NativeType = VkCommandBufferInheritanceViewportScissorInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceViewportScissorInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CommandBufferInheritanceViewportScissorInfoNV(VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D_ = {}, uint32_t viewportDepthCount_ = {}, const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: viewportScissor2D( viewportScissor2D_ ), viewportDepthCount( viewportDepthCount_ ), pViewportDepths( pViewportDepths_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR CommandBufferInheritanceViewportScissorInfoNV( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CommandBufferInheritanceViewportScissorInfoNV( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CommandBufferInheritanceViewportScissorInfoNV( *reinterpret_cast<CommandBufferInheritanceViewportScissorInfoNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CommandBufferInheritanceViewportScissorInfoNV & operator=( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CommandBufferInheritanceViewportScissorInfoNV & operator=( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setViewportScissor2D( VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewportScissor2D = viewportScissor2D_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setViewportDepthCount( uint32_t viewportDepthCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewportDepthCount = viewportDepthCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setPViewportDepths( const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pViewportDepths = pViewportDepths_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkCommandBufferInheritanceViewportScissorInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCommandBufferInheritanceViewportScissorInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkCommandBufferInheritanceViewportScissorInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCommandBufferInheritanceViewportScissorInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Viewport * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, viewportScissor2D, viewportDepthCount, pViewportDepths );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( CommandBufferInheritanceViewportScissorInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( viewportScissor2D == rhs.viewportScissor2D )
|
|
&& ( viewportDepthCount == rhs.viewportDepthCount )
|
|
&& ( pViewportDepths == rhs.pViewportDepths );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceViewportScissorInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D = {};
|
|
uint32_t viewportDepthCount = {};
|
|
const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV ) == sizeof( VkCommandBufferInheritanceViewportScissorInfoNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV>::value, "CommandBufferInheritanceViewportScissorInfoNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCommandBufferInheritanceViewportScissorInfoNV>
|
|
{
|
|
using Type = CommandBufferInheritanceViewportScissorInfoNV;
|
|
};
|
|
|
|
struct CommandBufferSubmitInfo
|
|
{
|
|
using NativeType = VkCommandBufferSubmitInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferSubmitInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfo(VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ = {}, uint32_t deviceMask_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: commandBuffer( commandBuffer_ ), deviceMask( deviceMask_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfo( CommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CommandBufferSubmitInfo( VkCommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CommandBufferSubmitInfo( *reinterpret_cast<CommandBufferSubmitInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CommandBufferSubmitInfo & operator=( CommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CommandBufferSubmitInfo & operator=( VkCommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setCommandBuffer( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
commandBuffer = commandBuffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceMask = deviceMask_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkCommandBufferSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCommandBufferSubmitInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkCommandBufferSubmitInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCommandBufferSubmitInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CommandBuffer const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, commandBuffer, deviceMask );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( CommandBufferSubmitInfo const & ) const = default;
|
|
#else
|
|
bool operator==( CommandBufferSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( commandBuffer == rhs.commandBuffer )
|
|
&& ( deviceMask == rhs.deviceMask );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( CommandBufferSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferSubmitInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer = {};
|
|
uint32_t deviceMask = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo ) == sizeof( VkCommandBufferSubmitInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo>::value, "CommandBufferSubmitInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCommandBufferSubmitInfo>
|
|
{
|
|
using Type = CommandBufferSubmitInfo;
|
|
};
|
|
using CommandBufferSubmitInfoKHR = CommandBufferSubmitInfo;
|
|
|
|
struct CommandPoolCreateInfo
|
|
{
|
|
using NativeType = VkCommandPoolCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandPoolCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo(VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CommandPoolCreateInfo( *reinterpret_cast<CommandPoolCreateInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CommandPoolCreateInfo & operator=( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CommandPoolCreateInfo & operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueFamilyIndex = queueFamilyIndex_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkCommandPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCommandPoolCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkCommandPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCommandPoolCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, queueFamilyIndex );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( CommandPoolCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( queueFamilyIndex == rhs.queueFamilyIndex );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags = {};
|
|
uint32_t queueFamilyIndex = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo>::value, "CommandPoolCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCommandPoolCreateInfo>
|
|
{
|
|
using Type = CommandPoolCreateInfo;
|
|
};
|
|
|
|
struct SpecializationMapEntry
|
|
{
|
|
using NativeType = VkSpecializationMapEntry;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SpecializationMapEntry(uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: constantID( constantID_ ), offset( offset_ ), size( size_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SpecializationMapEntry( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SpecializationMapEntry( *reinterpret_cast<SpecializationMapEntry const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SpecializationMapEntry & operator=( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SpecializationMapEntry & operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationMapEntry const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setConstantID( uint32_t constantID_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
constantID = constantID_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
offset = offset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setSize( size_t size_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
size = size_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSpecializationMapEntry const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSpecializationMapEntry*>( this );
|
|
}
|
|
|
|
explicit operator VkSpecializationMapEntry &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSpecializationMapEntry*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, uint32_t const &, size_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( constantID, offset, size );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SpecializationMapEntry const & ) const = default;
|
|
#else
|
|
bool operator==( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( constantID == rhs.constantID )
|
|
&& ( offset == rhs.offset )
|
|
&& ( size == rhs.size );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t constantID = {};
|
|
uint32_t offset = {};
|
|
size_t size = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SpecializationMapEntry>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SpecializationMapEntry>::value, "SpecializationMapEntry is not nothrow_move_constructible!" );
|
|
|
|
struct SpecializationInfo
|
|
{
|
|
using NativeType = VkSpecializationInfo;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SpecializationInfo(uint32_t mapEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ = {}, size_t dataSize_ = {}, const void * pData_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: mapEntryCount( mapEntryCount_ ), pMapEntries( pMapEntries_ ), dataSize( dataSize_ ), pData( pData_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SpecializationInfo( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SpecializationInfo( *reinterpret_cast<SpecializationInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
template <typename T>
|
|
SpecializationInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ = {} )
|
|
: mapEntryCount( static_cast<uint32_t>( mapEntries_.size() ) ), pMapEntries( mapEntries_.data() ), dataSize( data_.size() * sizeof(T) ), pData( data_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SpecializationInfo & operator=( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SpecializationInfo & operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setMapEntryCount( uint32_t mapEntryCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
mapEntryCount = mapEntryCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setPMapEntries( const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pMapEntries = pMapEntries_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SpecializationInfo & setMapEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
mapEntryCount = static_cast<uint32_t>( mapEntries_.size() );
|
|
pMapEntries = mapEntries_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dataSize = dataSize_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pData = pData_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
template <typename T>
|
|
SpecializationInfo & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dataSize = data_.size() * sizeof(T);
|
|
pData = data_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSpecializationInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSpecializationInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkSpecializationInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSpecializationInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * const &, size_t const &, const void * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( mapEntryCount, pMapEntries, dataSize, pData );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SpecializationInfo const & ) const = default;
|
|
#else
|
|
bool operator==( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( mapEntryCount == rhs.mapEntryCount )
|
|
&& ( pMapEntries == rhs.pMapEntries )
|
|
&& ( dataSize == rhs.dataSize )
|
|
&& ( pData == rhs.pData );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t mapEntryCount = {};
|
|
const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries = {};
|
|
size_t dataSize = {};
|
|
const void * pData = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SpecializationInfo ) == sizeof( VkSpecializationInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SpecializationInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SpecializationInfo>::value, "SpecializationInfo is not nothrow_move_constructible!" );
|
|
|
|
struct PipelineShaderStageCreateInfo
|
|
{
|
|
using NativeType = VkPipelineShaderStageCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo(VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, VULKAN_HPP_NAMESPACE::ShaderModule module_ = {}, const char * pName_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), stage( stage_ ), module( module_ ), pName( pName_ ), pSpecializationInfo( pSpecializationInfo_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineShaderStageCreateInfo( *reinterpret_cast<PipelineShaderStageCreateInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineShaderStageCreateInfo & operator=( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineShaderStageCreateInfo & operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stage = stage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setModule( VULKAN_HPP_NAMESPACE::ShaderModule module_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
module = module_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pName = pName_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPSpecializationInfo( const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSpecializationInfo = pSpecializationInfo_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineShaderStageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineShaderStageCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineShaderStageCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineShaderStageCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags const &, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits const &, VULKAN_HPP_NAMESPACE::ShaderModule const &, const char * const &, const VULKAN_HPP_NAMESPACE::SpecializationInfo * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, stage, module, pName, pSpecializationInfo );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
std::strong_ordering operator<=>( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
|
|
if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
|
|
if ( auto cmp = stage <=> rhs.stage; cmp != 0 ) return cmp;
|
|
if ( auto cmp = module <=> rhs.module; cmp != 0 ) return cmp;
|
|
if ( pName != rhs.pName )
|
|
if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
if ( auto cmp = pSpecializationInfo <=> rhs.pSpecializationInfo; cmp != 0 ) return cmp;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( stage == rhs.stage )
|
|
&& ( module == rhs.module )
|
|
&& ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) )
|
|
&& ( pSpecializationInfo == rhs.pSpecializationInfo );
|
|
}
|
|
|
|
bool operator!=( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex;
|
|
VULKAN_HPP_NAMESPACE::ShaderModule module = {};
|
|
const char * pName = {};
|
|
const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo>::value, "PipelineShaderStageCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineShaderStageCreateInfo>
|
|
{
|
|
using Type = PipelineShaderStageCreateInfo;
|
|
};
|
|
|
|
struct ComputePipelineCreateInfo
|
|
{
|
|
using NativeType = VkComputePipelineCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), stage( stage_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ComputePipelineCreateInfo( *reinterpret_cast<ComputePipelineCreateInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ComputePipelineCreateInfo & operator=( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ComputePipelineCreateInfo & operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & stage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stage = stage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
layout = layout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
basePipelineHandle = basePipelineHandle_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
basePipelineIndex = basePipelineIndex_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkComputePipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkComputePipelineCreateInfo*>( this );
}
explicit operator VkComputePipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkComputePipelineCreateInfo*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCreateFlags const &, VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const &, VULKAN_HPP_NAMESPACE::PipelineLayout const &, VULKAN_HPP_NAMESPACE::Pipeline const &, int32_t const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, flags, stage, layout, basePipelineHandle, basePipelineIndex );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ComputePipelineCreateInfo const & ) const = default;
#else
bool operator==( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( stage == rhs.stage )
&& ( layout == rhs.layout )
&& ( basePipelineHandle == rhs.basePipelineHandle )
&& ( basePipelineIndex == rhs.basePipelineIndex );
#endif
}
bool operator!=( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineCreateInfo;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage = {};
VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
int32_t basePipelineIndex = {};
};
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo>::value, "ComputePipelineCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eComputePipelineCreateInfo>
{
using Type = ComputePipelineCreateInfo;
};
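// Illustrative usage sketch (a comment only, not part of the generated API): a
// ComputePipelineCreateInfo can be filled with the chained setters above before it is
// handed to Device::createComputePipeline. 'computeStageInfo' (a
// PipelineShaderStageCreateInfo) and 'pipelineLayout' are assumed to exist elsewhere;
// sType and pNext are already set by the member initializers.
//
//   VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo createInfo{};
//   createInfo.setStage( computeStageInfo )
//             .setLayout( pipelineLayout )
//             .setBasePipelineIndex( -1 );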
struct ConditionalRenderingBeginInfoEXT
{
using NativeType = VkConditionalRenderingBeginInfoEXT;
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eConditionalRenderingBeginInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {}) VULKAN_HPP_NOEXCEPT
: buffer( buffer_ ), offset( offset_ ), flags( flags_ )
{}
VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ConditionalRenderingBeginInfoEXT( *reinterpret_cast<ConditionalRenderingBeginInfoEXT const *>( &rhs ) )
{}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
ConditionalRenderingBeginInfoEXT & operator=( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ConditionalRenderingBeginInfoEXT & operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT const *>( &rhs );
return *this;
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
explicit operator VkConditionalRenderingBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkConditionalRenderingBeginInfoEXT*>( this );
}
explicit operator VkConditionalRenderingBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkConditionalRenderingBeginInfoEXT*>( this );
}
#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, buffer, offset, flags );
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ConditionalRenderingBeginInfoEXT const & ) const = default;
#else
bool operator==( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( buffer == rhs.buffer )
&& ( offset == rhs.offset )
&& ( flags == rhs.flags );
#endif
}
bool operator!=( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT;
const void * pNext = {};
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags = {};
};
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT>::value, "ConditionalRenderingBeginInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eConditionalRenderingBeginInfoEXT>
{
using Type = ConditionalRenderingBeginInfoEXT;
};
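// Usage sketch (comment only, not generated code): the predicate for conditional
// rendering is described by buffer/offset/flags and then passed to
// CommandBuffer::beginConditionalRenderingEXT. 'predicateBuffer' is an assumed Buffer
// whose 32-bit value at offset 0 decides whether the conditional block executes.
//
//   VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT beginInfo{};
//   beginInfo.setBuffer( predicateBuffer )
//            .setOffset( 0 )
//            .setFlags( VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagBitsEXT::eInverted );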
struct ConformanceVersion
|
|
{
|
|
using NativeType = VkConformanceVersion;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ConformanceVersion(uint8_t major_ = {}, uint8_t minor_ = {}, uint8_t subminor_ = {}, uint8_t patch_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: major( major_ ), minor( minor_ ), subminor( subminor_ ), patch( patch_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ConformanceVersion( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ConformanceVersion( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ConformanceVersion( *reinterpret_cast<ConformanceVersion const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ConformanceVersion & operator=( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ConformanceVersion & operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConformanceVersion const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setMajor( uint8_t major_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
major = major_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setMinor( uint8_t minor_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
minor = minor_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setSubminor( uint8_t subminor_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subminor = subminor_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setPatch( uint8_t patch_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
patch = patch_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkConformanceVersion const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkConformanceVersion*>( this );
|
|
}
|
|
|
|
explicit operator VkConformanceVersion &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkConformanceVersion*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( major, minor, subminor, patch );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ConformanceVersion const & ) const = default;
|
|
#else
|
|
bool operator==( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( major == rhs.major )
|
|
&& ( minor == rhs.minor )
|
|
&& ( subminor == rhs.subminor )
|
|
&& ( patch == rhs.patch );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint8_t major = {};
|
|
uint8_t minor = {};
|
|
uint8_t subminor = {};
|
|
uint8_t patch = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ConformanceVersion ) == sizeof( VkConformanceVersion ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ConformanceVersion>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ConformanceVersion>::value, "ConformanceVersion is not nothrow_move_constructible!" );
|
|
using ConformanceVersionKHR = ConformanceVersion;
|
|
|
|
struct CooperativeMatrixPropertiesNV
|
|
{
|
|
using NativeType = VkCooperativeMatrixPropertiesNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixPropertiesNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV(uint32_t MSize_ = {}, uint32_t NSize_ = {}, uint32_t KSize_ = {}, VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ScopeNV scope_ = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice) VULKAN_HPP_NOEXCEPT
|
|
: MSize( MSize_ ), NSize( NSize_ ), KSize( KSize_ ), AType( AType_ ), BType( BType_ ), CType( CType_ ), DType( DType_ ), scope( scope_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CooperativeMatrixPropertiesNV( *reinterpret_cast<CooperativeMatrixPropertiesNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CooperativeMatrixPropertiesNV & operator=( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CooperativeMatrixPropertiesNV & operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setMSize( uint32_t MSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
MSize = MSize_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setNSize( uint32_t NSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
NSize = NSize_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setKSize( uint32_t KSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
KSize = KSize_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setAType( VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
AType = AType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setBType( VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
BType = BType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setCType( VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
CType = CType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setDType( VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
DType = DType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setScope( VULKAN_HPP_NAMESPACE::ScopeNV scope_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
scope = scope_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCooperativeMatrixPropertiesNV*>( this );
|
|
}
|
|
|
|
explicit operator VkCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCooperativeMatrixPropertiesNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ComponentTypeNV const &, VULKAN_HPP_NAMESPACE::ComponentTypeNV const &, VULKAN_HPP_NAMESPACE::ComponentTypeNV const &, VULKAN_HPP_NAMESPACE::ComponentTypeNV const &, VULKAN_HPP_NAMESPACE::ScopeNV const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, MSize, NSize, KSize, AType, BType, CType, DType, scope );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( CooperativeMatrixPropertiesNV const & ) const = default;
|
|
#else
|
|
bool operator==( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( MSize == rhs.MSize )
|
|
&& ( NSize == rhs.NSize )
|
|
&& ( KSize == rhs.KSize )
|
|
&& ( AType == rhs.AType )
|
|
&& ( BType == rhs.BType )
|
|
&& ( CType == rhs.CType )
|
|
&& ( DType == rhs.DType )
|
|
&& ( scope == rhs.scope );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixPropertiesNV;
|
|
void * pNext = {};
|
|
uint32_t MSize = {};
|
|
uint32_t NSize = {};
|
|
uint32_t KSize = {};
|
|
VULKAN_HPP_NAMESPACE::ComponentTypeNV AType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
|
|
VULKAN_HPP_NAMESPACE::ComponentTypeNV BType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
|
|
VULKAN_HPP_NAMESPACE::ComponentTypeNV CType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
|
|
VULKAN_HPP_NAMESPACE::ComponentTypeNV DType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
|
|
VULKAN_HPP_NAMESPACE::ScopeNV scope = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>::value, "CooperativeMatrixPropertiesNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCooperativeMatrixPropertiesNV>
|
|
{
|
|
using Type = CooperativeMatrixPropertiesNV;
|
|
};
|
|
|
|
struct CopyAccelerationStructureInfoKHR
|
|
{
|
|
using NativeType = VkCopyAccelerationStructureInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone) VULKAN_HPP_NOEXCEPT
|
|
: src( src_ ), dst( dst_ ), mode( mode_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CopyAccelerationStructureInfoKHR( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CopyAccelerationStructureInfoKHR( *reinterpret_cast<CopyAccelerationStructureInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CopyAccelerationStructureInfoKHR & operator=( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CopyAccelerationStructureInfoKHR & operator=( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
src = src_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dst = dst_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
mode = mode_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkCopyAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCopyAccelerationStructureInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkCopyAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCopyAccelerationStructureInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, src, dst, mode );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( CopyAccelerationStructureInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( src == rhs.src )
|
|
&& ( dst == rhs.dst )
|
|
&& ( mode == rhs.mode );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {};
|
|
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {};
|
|
VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR ) == sizeof( VkCopyAccelerationStructureInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR>::value, "CopyAccelerationStructureInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCopyAccelerationStructureInfoKHR>
|
|
{
|
|
using Type = CopyAccelerationStructureInfoKHR;
|
|
};
|
|
|
|
struct CopyAccelerationStructureToMemoryInfoKHR
|
|
{
|
|
using NativeType = VkCopyAccelerationStructureToMemoryInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone) VULKAN_HPP_NOEXCEPT
|
|
: src( src_ ), dst( dst_ ), mode( mode_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CopyAccelerationStructureToMemoryInfoKHR( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CopyAccelerationStructureToMemoryInfoKHR( *reinterpret_cast<CopyAccelerationStructureToMemoryInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CopyAccelerationStructureToMemoryInfoKHR & operator=( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CopyAccelerationStructureToMemoryInfoKHR & operator=( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
src = src_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dst = dst_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
mode = mode_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkCopyAccelerationStructureToMemoryInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkCopyAccelerationStructureToMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCopyAccelerationStructureToMemoryInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const &, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, src, dst, mode );
|
|
}
|
|
#endif
|
|
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {};
|
|
VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR ) == sizeof( VkCopyAccelerationStructureToMemoryInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR>::value, "CopyAccelerationStructureToMemoryInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCopyAccelerationStructureToMemoryInfoKHR>
|
|
{
|
|
using Type = CopyAccelerationStructureToMemoryInfoKHR;
|
|
};
|
|
|
|
struct CopyBufferInfo2
|
|
{
|
|
using NativeType = VkCopyBufferInfo2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferInfo2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CopyBufferInfo2(VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: srcBuffer( srcBuffer_ ), dstBuffer( dstBuffer_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR CopyBufferInfo2( CopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CopyBufferInfo2( VkCopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CopyBufferInfo2( *reinterpret_cast<CopyBufferInfo2 const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
CopyBufferInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferCopy2> const & regions_ )
|
|
: srcBuffer( srcBuffer_ ), dstBuffer( dstBuffer_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CopyBufferInfo2 & operator=( CopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CopyBufferInfo2 & operator=( VkCopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyBufferInfo2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcBuffer = srcBuffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstBuffer = dstBuffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
regionCount = regionCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pRegions = pRegions_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
CopyBufferInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferCopy2> const & regions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
regionCount = static_cast<uint32_t>( regions_.size() );
|
|
pRegions = regions_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkCopyBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCopyBufferInfo2*>( this );
|
|
}
|
|
|
|
explicit operator VkCopyBufferInfo2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCopyBufferInfo2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::Buffer const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::BufferCopy2 * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcBuffer, dstBuffer, regionCount, pRegions );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( CopyBufferInfo2 const & ) const = default;
|
|
#else
|
|
bool operator==( CopyBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( srcBuffer == rhs.srcBuffer )
|
|
&& ( dstBuffer == rhs.dstBuffer )
|
|
&& ( regionCount == rhs.regionCount )
|
|
&& ( pRegions == rhs.pRegions );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( CopyBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferInfo2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {};
|
|
VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {};
|
|
uint32_t regionCount = {};
|
|
const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyBufferInfo2 ) == sizeof( VkCopyBufferInfo2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyBufferInfo2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyBufferInfo2>::value, "CopyBufferInfo2 is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCopyBufferInfo2>
|
|
{
|
|
using Type = CopyBufferInfo2;
|
|
};
|
|
using CopyBufferInfo2KHR = CopyBufferInfo2;
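// Usage sketch (comment only, not generated code): the enhanced-mode setRegions
// overload derives regionCount and pRegions from an ArrayProxyNoTemporaries, so the
// container must not be a temporary and must stay alive while copyInfo is in use.
// 'srcBuffer', 'dstBuffer' and 'regions' (a named
// std::vector<VULKAN_HPP_NAMESPACE::BufferCopy2>) are assumed to exist elsewhere.
//
//   auto copyInfo = VULKAN_HPP_NAMESPACE::CopyBufferInfo2{}
//                     .setSrcBuffer( srcBuffer )
//                     .setDstBuffer( dstBuffer )
//                     .setRegions( regions );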
|
|
|
|
struct CopyBufferToImageInfo2
|
|
{
|
|
using NativeType = VkCopyBufferToImageInfo2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferToImageInfo2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2(VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: srcBuffer( srcBuffer_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2( CopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CopyBufferToImageInfo2( VkCopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CopyBufferToImageInfo2( *reinterpret_cast<CopyBufferToImageInfo2 const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
CopyBufferToImageInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2> const & regions_ )
|
|
: srcBuffer( srcBuffer_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CopyBufferToImageInfo2 & operator=( CopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CopyBufferToImageInfo2 & operator=( VkCopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcBuffer = srcBuffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstImage = dstImage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstImageLayout = dstImageLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
regionCount = regionCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pRegions = pRegions_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
CopyBufferToImageInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2> const & regions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
regionCount = static_cast<uint32_t>( regions_.size() );
|
|
pRegions = regions_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkCopyBufferToImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCopyBufferToImageInfo2*>( this );
|
|
}
|
|
|
|
explicit operator VkCopyBufferToImageInfo2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCopyBufferToImageInfo2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( CopyBufferToImageInfo2 const & ) const = default;
|
|
#else
|
|
bool operator==( CopyBufferToImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( srcBuffer == rhs.srcBuffer )
|
|
&& ( dstImage == rhs.dstImage )
|
|
&& ( dstImageLayout == rhs.dstImageLayout )
|
|
&& ( regionCount == rhs.regionCount )
|
|
&& ( pRegions == rhs.pRegions );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( CopyBufferToImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferToImageInfo2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {};
|
|
VULKAN_HPP_NAMESPACE::Image dstImage = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
uint32_t regionCount = {};
|
|
const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 ) == sizeof( VkCopyBufferToImageInfo2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2>::value, "CopyBufferToImageInfo2 is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCopyBufferToImageInfo2>
|
|
{
|
|
using Type = CopyBufferToImageInfo2;
|
|
};
|
|
using CopyBufferToImageInfo2KHR = CopyBufferToImageInfo2;
|
|
|
|
struct CopyCommandTransformInfoQCOM
|
|
{
|
|
using NativeType = VkCopyCommandTransformInfoQCOM;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyCommandTransformInfoQCOM;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity) VULKAN_HPP_NOEXCEPT
|
|
: transform( transform_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CopyCommandTransformInfoQCOM( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CopyCommandTransformInfoQCOM( *reinterpret_cast<CopyCommandTransformInfoQCOM const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CopyCommandTransformInfoQCOM & operator=( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CopyCommandTransformInfoQCOM & operator=( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
transform = transform_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkCopyCommandTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCopyCommandTransformInfoQCOM*>( this );
|
|
}
|
|
|
|
explicit operator VkCopyCommandTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCopyCommandTransformInfoQCOM*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, transform );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( CopyCommandTransformInfoQCOM const & ) const = default;
|
|
#else
|
|
bool operator==( CopyCommandTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( transform == rhs.transform );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( CopyCommandTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyCommandTransformInfoQCOM;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM ) == sizeof( VkCopyCommandTransformInfoQCOM ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM>::value, "CopyCommandTransformInfoQCOM is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCopyCommandTransformInfoQCOM>
|
|
{
|
|
using Type = CopyCommandTransformInfoQCOM;
|
|
};
|
|
|
|
struct CopyDescriptorSet
|
|
{
|
|
using NativeType = VkCopyDescriptorSet;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyDescriptorSet;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CopyDescriptorSet(VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {}, uint32_t srcBinding_ = {}, uint32_t srcArrayElement_ = {}, VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: srcSet( srcSet_ ), srcBinding( srcBinding_ ), srcArrayElement( srcArrayElement_ ), dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR CopyDescriptorSet( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CopyDescriptorSet( *reinterpret_cast<CopyDescriptorSet const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CopyDescriptorSet & operator=( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CopyDescriptorSet & operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyDescriptorSet const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcSet = srcSet_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcBinding( uint32_t srcBinding_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcBinding = srcBinding_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcArrayElement( uint32_t srcArrayElement_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcArrayElement = srcArrayElement_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstSet = dstSet_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstBinding = dstBinding_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstArrayElement = dstArrayElement_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorCount = descriptorCount_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkCopyDescriptorSet const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCopyDescriptorSet*>( this );
|
|
}
|
|
|
|
explicit operator VkCopyDescriptorSet &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCopyDescriptorSet*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DescriptorSet const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DescriptorSet const &, uint32_t const &, uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcSet, srcBinding, srcArrayElement, dstSet, dstBinding, dstArrayElement, descriptorCount );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( CopyDescriptorSet const & ) const = default;
|
|
#else
|
|
bool operator==( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( srcSet == rhs.srcSet )
|
|
&& ( srcBinding == rhs.srcBinding )
|
|
&& ( srcArrayElement == rhs.srcArrayElement )
|
|
&& ( dstSet == rhs.dstSet )
|
|
&& ( dstBinding == rhs.dstBinding )
|
|
&& ( dstArrayElement == rhs.dstArrayElement )
|
|
&& ( descriptorCount == rhs.descriptorCount );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyDescriptorSet;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DescriptorSet srcSet = {};
|
|
uint32_t srcBinding = {};
|
|
uint32_t srcArrayElement = {};
|
|
VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
|
|
uint32_t dstBinding = {};
|
|
uint32_t dstArrayElement = {};
|
|
uint32_t descriptorCount = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyDescriptorSet>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyDescriptorSet>::value, "CopyDescriptorSet is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCopyDescriptorSet>
|
|
{
|
|
using Type = CopyDescriptorSet;
|
|
};
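// Usage sketch (comment only, not generated code): copying one binding between
// descriptor sets; the result is typically passed to Device::updateDescriptorSets
// together with any writes. 'srcSet' and 'dstSet' are assumed to be valid
// DescriptorSet handles created elsewhere.
//
//   VULKAN_HPP_NAMESPACE::CopyDescriptorSet copy{};
//   copy.setSrcSet( srcSet ).setSrcBinding( 0 )
//       .setDstSet( dstSet ).setDstBinding( 0 )
//       .setDescriptorCount( 1 );
//   // device.updateDescriptorSets( {}, copy );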
|
|
|
|
struct ImageCopy2
|
|
{
|
|
using NativeType = VkImageCopy2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCopy2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageCopy2(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageCopy2( ImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageCopy2( VkImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageCopy2( *reinterpret_cast<ImageCopy2 const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageCopy2 & operator=( ImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageCopy2 & operator=( VkImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcSubresource = srcSubresource_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcOffset = srcOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstSubresource = dstSubresource_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstOffset = dstOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
extent = extent_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImageCopy2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageCopy2*>( this );
|
|
}
|
|
|
|
explicit operator VkImageCopy2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageCopy2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcSubresource, srcOffset, dstSubresource, dstOffset, extent );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageCopy2 const & ) const = default;
|
|
#else
|
|
bool operator==( ImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( srcSubresource == rhs.srcSubresource )
|
|
&& ( srcOffset == rhs.srcOffset )
|
|
&& ( dstSubresource == rhs.dstSubresource )
|
|
&& ( dstOffset == rhs.dstOffset )
|
|
&& ( extent == rhs.extent );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCopy2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
|
|
VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
|
|
VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
|
|
VULKAN_HPP_NAMESPACE::Extent3D extent = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCopy2 ) == sizeof( VkImageCopy2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCopy2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCopy2>::value, "ImageCopy2 is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageCopy2>
|
|
{
|
|
using Type = ImageCopy2;
|
|
};
|
|
using ImageCopy2KHR = ImageCopy2;
|
|
|
|
struct CopyImageInfo2
|
|
{
|
|
using NativeType = VkCopyImageInfo2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageInfo2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CopyImageInfo2(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR CopyImageInfo2( CopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CopyImageInfo2( VkCopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CopyImageInfo2( *reinterpret_cast<CopyImageInfo2 const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
CopyImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2> const & regions_ )
|
|
: srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CopyImageInfo2 & operator=( CopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CopyImageInfo2 & operator=( VkCopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyImageInfo2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcImage = srcImage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcImageLayout = srcImageLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstImage = dstImage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstImageLayout = dstImageLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
regionCount = regionCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pRegions = pRegions_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
CopyImageInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2> const & regions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
regionCount = static_cast<uint32_t>( regions_.size() );
|
|
pRegions = regions_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkCopyImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCopyImageInfo2*>( this );
|
|
}
|
|
|
|
explicit operator VkCopyImageInfo2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCopyImageInfo2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ImageCopy2 * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( CopyImageInfo2 const & ) const = default;
|
|
#else
|
|
bool operator==( CopyImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( srcImage == rhs.srcImage )
|
|
&& ( srcImageLayout == rhs.srcImageLayout )
|
|
&& ( dstImage == rhs.dstImage )
|
|
&& ( dstImageLayout == rhs.dstImageLayout )
|
|
&& ( regionCount == rhs.regionCount )
|
|
&& ( pRegions == rhs.pRegions );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( CopyImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageInfo2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Image srcImage = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::Image dstImage = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
uint32_t regionCount = {};
|
|
const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageInfo2 ) == sizeof( VkCopyImageInfo2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyImageInfo2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyImageInfo2>::value, "CopyImageInfo2 is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCopyImageInfo2>
|
|
{
|
|
using Type = CopyImageInfo2;
|
|
};
|
|
using CopyImageInfo2KHR = CopyImageInfo2;
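  // Illustrative sketch only: recording an image-to-image copy with the structure above,
  // assuming Vulkan 1.3 (or VK_KHR_copy_commands2) and valid, hypothetical handles, with a
  // region filled as sketched after ImageCopy2. setRegions() derives regionCount and
  // pRegions from the proxy, so the count cannot go out of sync.
  //
  //   void recordImageCopy( vk::CommandBuffer cmd, vk::Image src, vk::Image dst, vk::ImageCopy2 const & region )
  //   {
  //     vk::CopyImageInfo2 copyInfo = vk::CopyImageInfo2()
  //                                     .setSrcImage( src )
  //                                     .setSrcImageLayout( vk::ImageLayout::eTransferSrcOptimal )
  //                                     .setDstImage( dst )
  //                                     .setDstImageLayout( vk::ImageLayout::eTransferDstOptimal )
  //                                     .setRegions( region );
  //     cmd.copyImage2( copyInfo );
  //   }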
|
|
|
|
struct CopyImageToBufferInfo2
|
|
{
|
|
using NativeType = VkCopyImageToBufferInfo2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToBufferInfo2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstBuffer( dstBuffer_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2( CopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CopyImageToBufferInfo2( VkCopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CopyImageToBufferInfo2( *reinterpret_cast<CopyImageToBufferInfo2 const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
CopyImageToBufferInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2> const & regions_ )
|
|
: srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstBuffer( dstBuffer_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CopyImageToBufferInfo2 & operator=( CopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CopyImageToBufferInfo2 & operator=( VkCopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcImage = srcImage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcImageLayout = srcImageLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstBuffer = dstBuffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
regionCount = regionCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pRegions = pRegions_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
CopyImageToBufferInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2> const & regions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
regionCount = static_cast<uint32_t>( regions_.size() );
|
|
pRegions = regions_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkCopyImageToBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCopyImageToBufferInfo2*>( this );
|
|
}
|
|
|
|
explicit operator VkCopyImageToBufferInfo2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCopyImageToBufferInfo2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::Buffer const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( CopyImageToBufferInfo2 const & ) const = default;
|
|
#else
|
|
bool operator==( CopyImageToBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( srcImage == rhs.srcImage )
|
|
&& ( srcImageLayout == rhs.srcImageLayout )
|
|
&& ( dstBuffer == rhs.dstBuffer )
|
|
&& ( regionCount == rhs.regionCount )
|
|
&& ( pRegions == rhs.pRegions );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( CopyImageToBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToBufferInfo2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Image srcImage = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {};
|
|
uint32_t regionCount = {};
|
|
const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 ) == sizeof( VkCopyImageToBufferInfo2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2>::value, "CopyImageToBufferInfo2 is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCopyImageToBufferInfo2>
|
|
{
|
|
using Type = CopyImageToBufferInfo2;
|
|
};
|
|
using CopyImageToBufferInfo2KHR = CopyImageToBufferInfo2;
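  // Illustrative sketch only: reading an image back into a buffer via the structure above,
  // assuming Vulkan 1.3 (or VK_KHR_copy_commands2). BufferImageCopy2 is defined earlier in
  // this header; all handle names are hypothetical.
  //
  //   void recordReadback( vk::CommandBuffer cmd, vk::Image src, vk::Buffer dst, vk::Extent3D extent )
  //   {
  //     vk::BufferImageCopy2 region = vk::BufferImageCopy2()
  //                                     .setBufferOffset( 0 )
  //                                     .setImageSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //                                     .setImageExtent( extent );
  //     vk::CopyImageToBufferInfo2 copyInfo = vk::CopyImageToBufferInfo2()
  //                                             .setSrcImage( src )
  //                                             .setSrcImageLayout( vk::ImageLayout::eTransferSrcOptimal )
  //                                             .setDstBuffer( dst )
  //                                             .setRegions( region );
  //     cmd.copyImageToBuffer2( copyInfo );
  //   }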
|
|
|
|
struct CopyMemoryToAccelerationStructureInfoKHR
|
|
{
|
|
using NativeType = VkCopyMemoryToAccelerationStructureInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone) VULKAN_HPP_NOEXCEPT
|
|
: src( src_ ), dst( dst_ ), mode( mode_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CopyMemoryToAccelerationStructureInfoKHR( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CopyMemoryToAccelerationStructureInfoKHR( *reinterpret_cast<CopyMemoryToAccelerationStructureInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CopyMemoryToAccelerationStructureInfoKHR & operator=( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CopyMemoryToAccelerationStructureInfoKHR & operator=( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
src = src_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dst = dst_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
mode = mode_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkCopyMemoryToAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkCopyMemoryToAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCopyMemoryToAccelerationStructureInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, src, dst, mode );
|
|
}
|
|
#endif
|
|
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {};
|
|
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {};
|
|
VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR ) == sizeof( VkCopyMemoryToAccelerationStructureInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR>::value, "CopyMemoryToAccelerationStructureInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCopyMemoryToAccelerationStructureInfoKHR>
|
|
{
|
|
using Type = CopyMemoryToAccelerationStructureInfoKHR;
|
|
};
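  // Illustrative sketch only: deserializing an acceleration structure from device memory,
  // assuming VK_KHR_acceleration_structure is enabled and srcAddress points at previously
  // serialized acceleration-structure data. Handle names are hypothetical.
  //
  //   void recordDeserialize( vk::CommandBuffer cmd, vk::DeviceAddress srcAddress, vk::AccelerationStructureKHR dst )
  //   {
  //     vk::CopyMemoryToAccelerationStructureInfoKHR copyInfo = vk::CopyMemoryToAccelerationStructureInfoKHR()
  //                                                               .setSrc( vk::DeviceOrHostAddressConstKHR( srcAddress ) )
  //                                                               .setDst( dst )
  //                                                               .setMode( vk::CopyAccelerationStructureModeKHR::eDeserialize );
  //     cmd.copyMemoryToAccelerationStructureKHR( copyInfo );
  //   }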
|
|
|
|
struct CuFunctionCreateInfoNVX
|
|
{
|
|
using NativeType = VkCuFunctionCreateInfoNVX;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuFunctionCreateInfoNVX;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CuFunctionCreateInfoNVX(VULKAN_HPP_NAMESPACE::CuModuleNVX module_ = {}, const char * pName_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: module( module_ ), pName( pName_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR CuFunctionCreateInfoNVX( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CuFunctionCreateInfoNVX( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CuFunctionCreateInfoNVX( *reinterpret_cast<CuFunctionCreateInfoNVX const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CuFunctionCreateInfoNVX & operator=( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CuFunctionCreateInfoNVX & operator=( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setModule( VULKAN_HPP_NAMESPACE::CuModuleNVX module_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
module = module_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pName = pName_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkCuFunctionCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCuFunctionCreateInfoNVX*>( this );
|
|
}
|
|
|
|
explicit operator VkCuFunctionCreateInfoNVX &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCuFunctionCreateInfoNVX*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CuModuleNVX const &, const char * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, module, pName );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
std::strong_ordering operator<=>( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
|
|
if ( auto cmp = module <=> rhs.module; cmp != 0 ) return cmp;
|
|
if ( pName != rhs.pName )
|
|
if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( module == rhs.module )
|
|
&& ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) );
|
|
}
|
|
|
|
bool operator!=( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuFunctionCreateInfoNVX;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::CuModuleNVX module = {};
|
|
const char * pName = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX ) == sizeof( VkCuFunctionCreateInfoNVX ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX>::value, "CuFunctionCreateInfoNVX is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCuFunctionCreateInfoNVX>
|
|
{
|
|
using Type = CuFunctionCreateInfoNVX;
|
|
};
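  // Illustrative sketch only: looking up a kernel entry point in a CuModuleNVX created via
  // VK_NVX_binary_import. The kernel name is a hypothetical example, and the return style
  // assumes the default exception-based result handling.
  //
  //   vk::CuFunctionNVX getKernel( vk::Device device, vk::CuModuleNVX module )
  //   {
  //     vk::CuFunctionCreateInfoNVX createInfo = vk::CuFunctionCreateInfoNVX()
  //                                                .setModule( module )
  //                                                .setPName( "myKernel" );
  //     return device.createCuFunctionNVX( createInfo );
  //   }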
|
|
|
|
struct CuLaunchInfoNVX
|
|
{
|
|
using NativeType = VkCuLaunchInfoNVX;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuLaunchInfoNVX;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX(VULKAN_HPP_NAMESPACE::CuFunctionNVX function_ = {}, uint32_t gridDimX_ = {}, uint32_t gridDimY_ = {}, uint32_t gridDimZ_ = {}, uint32_t blockDimX_ = {}, uint32_t blockDimY_ = {}, uint32_t blockDimZ_ = {}, uint32_t sharedMemBytes_ = {}, size_t paramCount_ = {}, const void * const * pParams_ = {}, size_t extraCount_ = {}, const void * const * pExtras_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: function( function_ ), gridDimX( gridDimX_ ), gridDimY( gridDimY_ ), gridDimZ( gridDimZ_ ), blockDimX( blockDimX_ ), blockDimY( blockDimY_ ), blockDimZ( blockDimZ_ ), sharedMemBytes( sharedMemBytes_ ), paramCount( paramCount_ ), pParams( pParams_ ), extraCount( extraCount_ ), pExtras( pExtras_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CuLaunchInfoNVX( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CuLaunchInfoNVX( *reinterpret_cast<CuLaunchInfoNVX const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
CuLaunchInfoNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_, uint32_t gridDimX_, uint32_t gridDimY_, uint32_t gridDimZ_, uint32_t blockDimX_, uint32_t blockDimY_, uint32_t blockDimZ_, uint32_t sharedMemBytes_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const void * const> const & params_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const void * const> const & extras_ = {} )
|
|
: function( function_ ), gridDimX( gridDimX_ ), gridDimY( gridDimY_ ), gridDimZ( gridDimZ_ ), blockDimX( blockDimX_ ), blockDimY( blockDimY_ ), blockDimZ( blockDimZ_ ), sharedMemBytes( sharedMemBytes_ ), paramCount( params_.size() ), pParams( params_.data() ), extraCount( extras_.size() ), pExtras( extras_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CuLaunchInfoNVX & operator=( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CuLaunchInfoNVX & operator=( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setFunction( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
function = function_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimX( uint32_t gridDimX_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
gridDimX = gridDimX_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimY( uint32_t gridDimY_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
gridDimY = gridDimY_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimZ( uint32_t gridDimZ_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
gridDimZ = gridDimZ_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimX( uint32_t blockDimX_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
blockDimX = blockDimX_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimY( uint32_t blockDimY_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
blockDimY = blockDimY_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimZ( uint32_t blockDimZ_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
blockDimZ = blockDimZ_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setSharedMemBytes( uint32_t sharedMemBytes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sharedMemBytes = sharedMemBytes_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setParamCount( size_t paramCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
paramCount = paramCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPParams( const void * const * pParams_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pParams = pParams_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
CuLaunchInfoNVX & setParams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const void * const> const & params_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
paramCount = params_.size();
|
|
pParams = params_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setExtraCount( size_t extraCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
extraCount = extraCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPExtras( const void * const * pExtras_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pExtras = pExtras_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
CuLaunchInfoNVX & setExtras( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const void * const> const & extras_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
extraCount = extras_.size();
|
|
pExtras = extras_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkCuLaunchInfoNVX const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCuLaunchInfoNVX*>( this );
|
|
}
|
|
|
|
explicit operator VkCuLaunchInfoNVX &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCuLaunchInfoNVX*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CuFunctionNVX const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, size_t const &, const void * const * const &, size_t const &, const void * const * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, function, gridDimX, gridDimY, gridDimZ, blockDimX, blockDimY, blockDimZ, sharedMemBytes, paramCount, pParams, extraCount, pExtras );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( CuLaunchInfoNVX const & ) const = default;
|
|
#else
|
|
bool operator==( CuLaunchInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( function == rhs.function )
|
|
&& ( gridDimX == rhs.gridDimX )
|
|
&& ( gridDimY == rhs.gridDimY )
|
|
&& ( gridDimZ == rhs.gridDimZ )
|
|
&& ( blockDimX == rhs.blockDimX )
|
|
&& ( blockDimY == rhs.blockDimY )
|
|
&& ( blockDimZ == rhs.blockDimZ )
|
|
&& ( sharedMemBytes == rhs.sharedMemBytes )
|
|
&& ( paramCount == rhs.paramCount )
|
|
&& ( pParams == rhs.pParams )
|
|
&& ( extraCount == rhs.extraCount )
|
|
&& ( pExtras == rhs.pExtras );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( CuLaunchInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuLaunchInfoNVX;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::CuFunctionNVX function = {};
|
|
uint32_t gridDimX = {};
|
|
uint32_t gridDimY = {};
|
|
uint32_t gridDimZ = {};
|
|
uint32_t blockDimX = {};
|
|
uint32_t blockDimY = {};
|
|
uint32_t blockDimZ = {};
|
|
uint32_t sharedMemBytes = {};
|
|
size_t paramCount = {};
|
|
const void * const * pParams = {};
|
|
size_t extraCount = {};
|
|
const void * const * pExtras = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX ) == sizeof( VkCuLaunchInfoNVX ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX>::value, "CuLaunchInfoNVX is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCuLaunchInfoNVX>
|
|
{
|
|
using Type = CuLaunchInfoNVX;
|
|
};
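  // Illustrative sketch only: recording a CUDA kernel launch through VK_NVX_binary_import.
  // The grid/block dimensions are arbitrary example values; the pointers passed via params
  // must remain valid until the command buffer has been submitted and executed.
  //
  //   void recordLaunch( vk::CommandBuffer cmd, vk::CuFunctionNVX function, const void * const * params, size_t paramCount )
  //   {
  //     vk::CuLaunchInfoNVX launchInfo = vk::CuLaunchInfoNVX()
  //                                        .setFunction( function )
  //                                        .setGridDimX( 64 ).setGridDimY( 1 ).setGridDimZ( 1 )
  //                                        .setBlockDimX( 256 ).setBlockDimY( 1 ).setBlockDimZ( 1 )
  //                                        .setParamCount( paramCount )
  //                                        .setPParams( params );
  //     cmd.cuLaunchKernelNVX( launchInfo );
  //   }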
|
|
|
|
struct CuModuleCreateInfoNVX
|
|
{
|
|
using NativeType = VkCuModuleCreateInfoNVX;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuModuleCreateInfoNVX;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX(size_t dataSize_ = {}, const void * pData_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: dataSize( dataSize_ ), pData( pData_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( CuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CuModuleCreateInfoNVX( VkCuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: CuModuleCreateInfoNVX( *reinterpret_cast<CuModuleCreateInfoNVX const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
template <typename T>
|
|
CuModuleCreateInfoNVX( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ )
|
|
: dataSize( data_.size() * sizeof(T) ), pData( data_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
CuModuleCreateInfoNVX & operator=( CuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
CuModuleCreateInfoNVX & operator=( VkCuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dataSize = dataSize_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pData = pData_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
template <typename T>
|
|
CuModuleCreateInfoNVX & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dataSize = data_.size() * sizeof(T);
|
|
pData = data_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkCuModuleCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkCuModuleCreateInfoNVX*>( this );
|
|
}
|
|
|
|
explicit operator VkCuModuleCreateInfoNVX &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkCuModuleCreateInfoNVX*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, size_t const &, const void * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, dataSize, pData );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( CuModuleCreateInfoNVX const & ) const = default;
|
|
#else
|
|
bool operator==( CuModuleCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( dataSize == rhs.dataSize )
|
|
&& ( pData == rhs.pData );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( CuModuleCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuModuleCreateInfoNVX;
|
|
const void * pNext = {};
|
|
size_t dataSize = {};
|
|
const void * pData = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX ) == sizeof( VkCuModuleCreateInfoNVX ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX>::value, "CuModuleCreateInfoNVX is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eCuModuleCreateInfoNVX>
|
|
{
|
|
using Type = CuModuleCreateInfoNVX;
|
|
};
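  // Illustrative sketch only: creating a CuModuleNVX from a binary blob (e.g. a cubin) held
  // in a std::vector, using the explicit size/pointer setters; the templated setData() setter
  // above can fill both members in one call. Assumes <vector> and exception-based result
  // handling.
  //
  //   vk::CuModuleNVX loadModule( vk::Device device, std::vector<uint8_t> const & blob )
  //   {
  //     vk::CuModuleCreateInfoNVX createInfo = vk::CuModuleCreateInfoNVX()
  //                                              .setDataSize( blob.size() )
  //                                              .setPData( blob.data() );
  //     return device.createCuModuleNVX( createInfo );
  //   }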
|
|
|
|
#if defined( VK_USE_PLATFORM_WIN32_KHR )
|
|
struct D3D12FenceSubmitInfoKHR
|
|
{
|
|
using NativeType = VkD3D12FenceSubmitInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eD3D12FenceSubmitInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR(uint32_t waitSemaphoreValuesCount_ = {}, const uint64_t * pWaitSemaphoreValues_ = {}, uint32_t signalSemaphoreValuesCount_ = {}, const uint64_t * pSignalSemaphoreValues_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: waitSemaphoreValuesCount( waitSemaphoreValuesCount_ ), pWaitSemaphoreValues( pWaitSemaphoreValues_ ), signalSemaphoreValuesCount( signalSemaphoreValuesCount_ ), pSignalSemaphoreValues( pSignalSemaphoreValues_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: D3D12FenceSubmitInfoKHR( *reinterpret_cast<D3D12FenceSubmitInfoKHR const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
D3D12FenceSubmitInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {} )
|
|
: waitSemaphoreValuesCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) ), pWaitSemaphoreValues( waitSemaphoreValues_.data() ), signalSemaphoreValuesCount( static_cast<uint32_t>( signalSemaphoreValues_.size() ) ), pSignalSemaphoreValues( signalSemaphoreValues_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
D3D12FenceSubmitInfoKHR & operator=( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
D3D12FenceSubmitInfoKHR & operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
waitSemaphoreValuesCount = waitSemaphoreValuesCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pWaitSemaphoreValues = pWaitSemaphoreValues_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
D3D12FenceSubmitInfoKHR & setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
waitSemaphoreValuesCount = static_cast<uint32_t>( waitSemaphoreValues_.size() );
|
|
pWaitSemaphoreValues = waitSemaphoreValues_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
signalSemaphoreValuesCount = signalSemaphoreValuesCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSignalSemaphoreValues = pSignalSemaphoreValues_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
D3D12FenceSubmitInfoKHR & setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
signalSemaphoreValuesCount = static_cast<uint32_t>( signalSemaphoreValues_.size() );
|
|
pSignalSemaphoreValues = signalSemaphoreValues_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkD3D12FenceSubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkD3D12FenceSubmitInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkD3D12FenceSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkD3D12FenceSubmitInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint64_t * const &, uint32_t const &, const uint64_t * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, waitSemaphoreValuesCount, pWaitSemaphoreValues, signalSemaphoreValuesCount, pSignalSemaphoreValues );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( D3D12FenceSubmitInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount )
|
|
&& ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues )
|
|
&& ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount )
|
|
&& ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR;
|
|
const void * pNext = {};
|
|
uint32_t waitSemaphoreValuesCount = {};
|
|
const uint64_t * pWaitSemaphoreValues = {};
|
|
uint32_t signalSemaphoreValuesCount = {};
|
|
const uint64_t * pSignalSemaphoreValues = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR>::value, "D3D12FenceSubmitInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eD3D12FenceSubmitInfoKHR>
|
|
{
|
|
using Type = D3D12FenceSubmitInfoKHR;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
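  // Illustrative sketch only (Win32 builds): attaching D3D12 fence values to a queue submission
  // by chaining D3D12FenceSubmitInfoKHR into vk::SubmitInfo, as used with
  // VK_KHR_external_semaphore_win32. All handles and values below are hypothetical.
  //
  //   void submitWithD3D12Fence( vk::Queue queue, vk::CommandBuffer cmd, vk::Semaphore wait, vk::Semaphore signal, uint64_t waitValue, uint64_t signalValue )
  //   {
  //     vk::D3D12FenceSubmitInfoKHR fenceValues = vk::D3D12FenceSubmitInfoKHR()
  //                                                 .setWaitSemaphoreValues( waitValue )
  //                                                 .setSignalSemaphoreValues( signalValue );
  //     vk::PipelineStageFlags waitStage = vk::PipelineStageFlagBits::eTopOfPipe;
  //     vk::SubmitInfo submitInfo = vk::SubmitInfo()
  //                                   .setWaitSemaphores( wait )
  //                                   .setWaitDstStageMask( waitStage )
  //                                   .setCommandBuffers( cmd )
  //                                   .setSignalSemaphores( signal )
  //                                   .setPNext( &fenceValues );
  //     queue.submit( submitInfo );
  //   }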
|
|
|
|
struct DebugMarkerMarkerInfoEXT
|
|
{
|
|
using NativeType = VkDebugMarkerMarkerInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerMarkerInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT(const char * pMarkerName_ = {}, std::array<float,4> const & color_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: pMarkerName( pMarkerName_ ), color( color_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DebugMarkerMarkerInfoEXT( *reinterpret_cast<DebugMarkerMarkerInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DebugMarkerMarkerInfoEXT & operator=( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DebugMarkerMarkerInfoEXT & operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setPMarkerName( const char * pMarkerName_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pMarkerName = pMarkerName_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setColor( std::array<float,4> color_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
color = color_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDebugMarkerMarkerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkDebugMarkerMarkerInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const char * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pMarkerName, color );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
std::partial_ordering operator<=>( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
|
|
if ( pMarkerName != rhs.pMarkerName )
|
|
if ( auto cmp = strcmp( pMarkerName, rhs.pMarkerName ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater;
|
|
if ( auto cmp = color <=> rhs.color; cmp != 0 ) return cmp;
|
|
|
|
return std::partial_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( ( pMarkerName == rhs.pMarkerName ) || ( strcmp( pMarkerName, rhs.pMarkerName ) == 0 ) )
|
|
&& ( color == rhs.color );
|
|
}
|
|
|
|
bool operator!=( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT;
|
|
const void * pNext = {};
|
|
const char * pMarkerName = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> color = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT>::value, "DebugMarkerMarkerInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDebugMarkerMarkerInfoEXT>
|
|
{
|
|
using Type = DebugMarkerMarkerInfoEXT;
|
|
};
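  // Illustrative sketch only: bracketing a stretch of commands with a named, colored marker,
  // assuming VK_EXT_debug_marker is enabled and its entry points are loaded. The marker name
  // and color are hypothetical.
  //
  //   void annotatePass( vk::CommandBuffer cmd )
  //   {
  //     vk::DebugMarkerMarkerInfoEXT marker = vk::DebugMarkerMarkerInfoEXT()
  //                                             .setPMarkerName( "shadow pass" )
  //                                             .setColor( { 1.0f, 0.5f, 0.0f, 1.0f } );
  //     cmd.debugMarkerBeginEXT( marker );
  //     // ... record the pass ...
  //     cmd.debugMarkerEndEXT();
  //   }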
|
|
|
|
struct DebugMarkerObjectNameInfoEXT
|
|
{
|
|
using NativeType = VkDebugMarkerObjectNameInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectNameInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = {}, const char * pObjectName_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: objectType( objectType_ ), object( object_ ), pObjectName( pObjectName_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DebugMarkerObjectNameInfoEXT( *reinterpret_cast<DebugMarkerObjectNameInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DebugMarkerObjectNameInfoEXT & operator=( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DebugMarkerObjectNameInfoEXT & operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
objectType = objectType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
object = object_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pObjectName = pObjectName_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDebugMarkerObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkDebugMarkerObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDebugMarkerObjectNameInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT const &, uint64_t const &, const char * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, objectType, object, pObjectName );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
std::strong_ordering operator<=>( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
|
|
if ( auto cmp = objectType <=> rhs.objectType; cmp != 0 ) return cmp;
|
|
if ( auto cmp = object <=> rhs.object; cmp != 0 ) return cmp;
|
|
if ( pObjectName != rhs.pObjectName )
|
|
if ( auto cmp = strcmp( pObjectName, rhs.pObjectName ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( objectType == rhs.objectType )
|
|
&& ( object == rhs.object )
|
|
&& ( ( pObjectName == rhs.pObjectName ) || ( strcmp( pObjectName, rhs.pObjectName ) == 0 ) );
|
|
}
|
|
|
|
bool operator!=( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
|
|
uint64_t object = {};
|
|
const char * pObjectName = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT>::value, "DebugMarkerObjectNameInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDebugMarkerObjectNameInfoEXT>
|
|
{
|
|
using Type = DebugMarkerObjectNameInfoEXT;
|
|
};
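  // Illustrative sketch only: giving a buffer a human-readable name for debugging tools,
  // assuming VK_EXT_debug_marker is enabled. The object member takes the raw 64-bit handle
  // value, hence the cast through the C handle type.
  //
  //   void nameBuffer( vk::Device device, vk::Buffer buffer, const char * name )
  //   {
  //     vk::DebugMarkerObjectNameInfoEXT nameInfo = vk::DebugMarkerObjectNameInfoEXT()
  //                                                   .setObjectType( vk::DebugReportObjectTypeEXT::eBuffer )
  //                                                   .setObject( uint64_t( static_cast<VkBuffer>( buffer ) ) )
  //                                                   .setPObjectName( name );
  //     device.debugMarkerSetObjectNameEXT( nameInfo );
  //   }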
|
|
|
|
struct DebugMarkerObjectTagInfoEXT
|
|
{
|
|
using NativeType = VkDebugMarkerObjectTagInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectTagInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = {}, uint64_t tagName_ = {}, size_t tagSize_ = {}, const void * pTag_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: objectType( objectType_ ), object( object_ ), tagName( tagName_ ), tagSize( tagSize_ ), pTag( pTag_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DebugMarkerObjectTagInfoEXT( *reinterpret_cast<DebugMarkerObjectTagInfoEXT const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
template <typename T>
|
|
DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, uint64_t object_, uint64_t tagName_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ )
|
|
: objectType( objectType_ ), object( object_ ), tagName( tagName_ ), tagSize( tag_.size() * sizeof(T) ), pTag( tag_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DebugMarkerObjectTagInfoEXT & operator=( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DebugMarkerObjectTagInfoEXT & operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
objectType = objectType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
object = object_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
tagName = tagName_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
tagSize = tagSize_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pTag = pTag_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
template <typename T>
|
|
DebugMarkerObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
tagSize = tag_.size() * sizeof(T);
|
|
pTag = tag_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDebugMarkerObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkDebugMarkerObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDebugMarkerObjectTagInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT const &, uint64_t const &, uint64_t const &, size_t const &, const void * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, objectType, object, tagName, tagSize, pTag );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DebugMarkerObjectTagInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( objectType == rhs.objectType )
|
|
&& ( object == rhs.object )
|
|
&& ( tagName == rhs.tagName )
|
|
&& ( tagSize == rhs.tagSize )
|
|
&& ( pTag == rhs.pTag );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
|
|
uint64_t object = {};
|
|
uint64_t tagName = {};
|
|
size_t tagSize = {};
|
|
const void * pTag = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT>::value, "DebugMarkerObjectTagInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDebugMarkerObjectTagInfoEXT>
|
|
{
|
|
using Type = DebugMarkerObjectTagInfoEXT;
|
|
};
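  // Illustrative sketch only: attaching an arbitrary binary tag to an image, assuming
  // VK_EXT_debug_marker is enabled and <vector> is available. Handle and payload names are
  // hypothetical.
  //
  //   void tagImage( vk::Device device, vk::Image image, uint64_t tagName, std::vector<uint8_t> const & payload )
  //   {
  //     vk::DebugMarkerObjectTagInfoEXT tagInfo = vk::DebugMarkerObjectTagInfoEXT()
  //                                                 .setObjectType( vk::DebugReportObjectTypeEXT::eImage )
  //                                                 .setObject( uint64_t( static_cast<VkImage>( image ) ) )
  //                                                 .setTagName( tagName )
  //                                                 .setTagSize( payload.size() )
  //                                                 .setPTag( payload.data() );
  //     device.debugMarkerSetObjectTagEXT( tagInfo );
  //   }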
|
|
|
|
  struct DebugReportCallbackCreateInfoEXT
  {
    using NativeType = VkDebugReportCallbackCreateInfoEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugReportCallbackCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = {}, PFN_vkDebugReportCallbackEXT pfnCallback_ = {}, void * pUserData_ = {}) VULKAN_HPP_NOEXCEPT
      : flags( flags_ ), pfnCallback( pfnCallback_ ), pUserData( pUserData_ )
    {}

    VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DebugReportCallbackCreateInfoEXT( *reinterpret_cast<DebugReportCallbackCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    DebugReportCallbackCreateInfoEXT & operator=( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DebugReportCallbackCreateInfoEXT & operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ ) VULKAN_HPP_NOEXCEPT
    {
      pfnCallback = pfnCallback_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
    {
      pUserData = pUserData_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkDebugReportCallbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( this );
    }

    explicit operator VkDebugReportCallbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDebugReportCallbackCreateInfoEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT const &, PFN_vkDebugReportCallbackEXT const &, void * const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, pfnCallback, pUserData );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( DebugReportCallbackCreateInfoEXT const & ) const = default;
#else
    bool operator==( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags )
          && ( pfnCallback == rhs.pfnCallback )
          && ( pUserData == rhs.pUserData );
#endif
    }

    bool operator!=( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags = {};
    PFN_vkDebugReportCallbackEXT pfnCallback = {};
    void * pUserData = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT>::value, "DebugReportCallbackCreateInfoEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eDebugReportCallbackCreateInfoEXT>
  {
    using Type = DebugReportCallbackCreateInfoEXT;
  };

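  // Illustrative usage sketch (not part of the generated API): building a
  // DebugReportCallbackCreateInfoEXT with the fluent setters and registering it.
  // This assumes the VK_EXT_debug_report extension is enabled on the instance and
  // that the dispatcher in use has loaded vkCreateDebugReportCallbackEXT; the
  // callback function name is hypothetical.
  //
  //   VKAPI_ATTR VkBool32 VKAPI_CALL myReportCallback( VkDebugReportFlagsEXT, VkDebugReportObjectTypeEXT,
  //                                                    uint64_t, size_t, int32_t, const char *,
  //                                                    const char * pMessage, void * )
  //   {
  //     std::cerr << pMessage << '\n';
  //     return VK_FALSE;  // do not abort the call that triggered the report
  //   }
  //
  //   auto createInfo = vk::DebugReportCallbackCreateInfoEXT{}
  //                       .setFlags( vk::DebugReportFlagBitsEXT::eError | vk::DebugReportFlagBitsEXT::eWarning )
  //                       .setPfnCallback( &myReportCallback );
  //   vk::DebugReportCallbackEXT callback = instance.createDebugReportCallbackEXT( createInfo );
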
  struct DebugUtilsLabelEXT
  {
    using NativeType = VkDebugUtilsLabelEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsLabelEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT(const char * pLabelName_ = {}, std::array<float,4> const & color_ = {}) VULKAN_HPP_NOEXCEPT
      : pLabelName( pLabelName_ ), color( color_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DebugUtilsLabelEXT( *reinterpret_cast<DebugUtilsLabelEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    DebugUtilsLabelEXT & operator=( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DebugUtilsLabelEXT & operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setPLabelName( const char * pLabelName_ ) VULKAN_HPP_NOEXCEPT
    {
      pLabelName = pLabelName_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setColor( std::array<float,4> color_ ) VULKAN_HPP_NOEXCEPT
    {
      color = color_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkDebugUtilsLabelEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDebugUtilsLabelEXT*>( this );
    }

    explicit operator VkDebugUtilsLabelEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDebugUtilsLabelEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const char * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pLabelName, color );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    std::partial_ordering operator<=>( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
      if ( pLabelName != rhs.pLabelName )
        if ( auto cmp = strcmp( pLabelName, rhs.pLabelName ); cmp != 0 )
          return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater;
      if ( auto cmp = color <=> rhs.color; cmp != 0 ) return cmp;

      return std::partial_ordering::equivalent;
    }
#endif

    bool operator==( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( ( pLabelName == rhs.pLabelName ) || ( strcmp( pLabelName, rhs.pLabelName ) == 0 ) )
          && ( color == rhs.color );
    }

    bool operator!=( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsLabelEXT;
    const void * pNext = {};
    const char * pLabelName = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> color = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT>::value, "DebugUtilsLabelEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eDebugUtilsLabelEXT>
  {
    using Type = DebugUtilsLabelEXT;
  };

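  // Illustrative usage sketch (not part of the generated API): opening and closing
  // a labeled region on a command buffer, assuming VK_EXT_debug_utils is enabled
  // and the corresponding entry points are available through the dispatcher in use.
  //
  //   vk::DebugUtilsLabelEXT label{ "shadow pass", { { 0.1f, 0.1f, 0.4f, 1.0f } } };
  //   commandBuffer.beginDebugUtilsLabelEXT( label );
  //   // ... record the pass ...
  //   commandBuffer.endDebugUtilsLabelEXT();
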
  struct DebugUtilsObjectNameInfoEXT
  {
    using NativeType = VkDebugUtilsObjectNameInfoEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectNameInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, uint64_t objectHandle_ = {}, const char * pObjectName_ = {}) VULKAN_HPP_NOEXCEPT
      : objectType( objectType_ ), objectHandle( objectHandle_ ), pObjectName( pObjectName_ )
    {}

    VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DebugUtilsObjectNameInfoEXT( *reinterpret_cast<DebugUtilsObjectNameInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    DebugUtilsObjectNameInfoEXT & operator=( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DebugUtilsObjectNameInfoEXT & operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
    {
      objectType = objectType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
    {
      objectHandle = objectHandle_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT
    {
      pObjectName = pObjectName_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkDebugUtilsObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT*>( this );
    }

    explicit operator VkDebugUtilsObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDebugUtilsObjectNameInfoEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ObjectType const &, uint64_t const &, const char * const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, objectType, objectHandle, pObjectName );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    std::strong_ordering operator<=>( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
      if ( auto cmp = objectType <=> rhs.objectType; cmp != 0 ) return cmp;
      if ( auto cmp = objectHandle <=> rhs.objectHandle; cmp != 0 ) return cmp;
      if ( pObjectName != rhs.pObjectName )
        if ( auto cmp = strcmp( pObjectName, rhs.pObjectName ); cmp != 0 )
          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

      return std::strong_ordering::equivalent;
    }
#endif

    bool operator==( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( objectType == rhs.objectType )
          && ( objectHandle == rhs.objectHandle )
          && ( ( pObjectName == rhs.pObjectName ) || ( strcmp( pObjectName, rhs.pObjectName ) == 0 ) );
    }

    bool operator!=( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
    uint64_t objectHandle = {};
    const char * pObjectName = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT>::value, "DebugUtilsObjectNameInfoEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eDebugUtilsObjectNameInfoEXT>
  {
    using Type = DebugUtilsObjectNameInfoEXT;
  };

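  // Illustrative usage sketch (not part of the generated API): attaching a
  // human-readable name to a handle so that validation messages and capture tools
  // can refer to it. Assumes VK_EXT_debug_utils is enabled; "device" and "buffer"
  // are placeholders for handles created elsewhere.
  //
  //   device.setDebugUtilsObjectNameEXT(
  //     vk::DebugUtilsObjectNameInfoEXT{}
  //       .setObjectType( vk::ObjectType::eBuffer )
  //       .setObjectHandle( uint64_t( static_cast<VkBuffer>( buffer ) ) )
  //       .setPObjectName( "staging buffer" ) );
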
struct DebugUtilsMessengerCallbackDataEXT
|
|
{
|
|
using NativeType = VkDebugUtilsMessengerCallbackDataEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = {}, const char * pMessageIdName_ = {}, int32_t messageIdNumber_ = {}, const char * pMessage_ = {}, uint32_t queueLabelCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ = {}, uint32_t cmdBufLabelCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ = {}, uint32_t objectCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), pMessageIdName( pMessageIdName_ ), messageIdNumber( messageIdNumber_ ), pMessage( pMessage_ ), queueLabelCount( queueLabelCount_ ), pQueueLabels( pQueueLabels_ ), cmdBufLabelCount( cmdBufLabelCount_ ), pCmdBufLabels( pCmdBufLabels_ ), objectCount( objectCount_ ), pObjects( pObjects_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DebugUtilsMessengerCallbackDataEXT( *reinterpret_cast<DebugUtilsMessengerCallbackDataEXT const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DebugUtilsMessengerCallbackDataEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_, const char * pMessageIdName_, int32_t messageIdNumber_, const char * pMessage_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & queueLabels_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & cmdBufLabels_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT> const & objects_ = {} )
|
|
: flags( flags_ ), pMessageIdName( pMessageIdName_ ), messageIdNumber( messageIdNumber_ ), pMessage( pMessage_ ), queueLabelCount( static_cast<uint32_t>( queueLabels_.size() ) ), pQueueLabels( queueLabels_.data() ), cmdBufLabelCount( static_cast<uint32_t>( cmdBufLabels_.size() ) ), pCmdBufLabels( cmdBufLabels_.data() ), objectCount( static_cast<uint32_t>( objects_.size() ) ), pObjects( objects_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DebugUtilsMessengerCallbackDataEXT & operator=( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DebugUtilsMessengerCallbackDataEXT & operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPMessageIdName( const char * pMessageIdName_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pMessageIdName = pMessageIdName_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setMessageIdNumber( int32_t messageIdNumber_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
messageIdNumber = messageIdNumber_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPMessage( const char * pMessage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pMessage = pMessage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setQueueLabelCount( uint32_t queueLabelCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueLabelCount = queueLabelCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPQueueLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pQueueLabels = pQueueLabels_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DebugUtilsMessengerCallbackDataEXT & setQueueLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & queueLabels_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueLabelCount = static_cast<uint32_t>( queueLabels_.size() );
|
|
pQueueLabels = queueLabels_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
cmdBufLabelCount = cmdBufLabelCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPCmdBufLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pCmdBufLabels = pCmdBufLabels_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DebugUtilsMessengerCallbackDataEXT & setCmdBufLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & cmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
cmdBufLabelCount = static_cast<uint32_t>( cmdBufLabels_.size() );
|
|
pCmdBufLabels = cmdBufLabels_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
objectCount = objectCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPObjects( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pObjects = pObjects_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DebugUtilsMessengerCallbackDataEXT & setObjects( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT> const & objects_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
objectCount = static_cast<uint32_t>( objects_.size() );
|
|
pObjects = objects_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDebugUtilsMessengerCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkDebugUtilsMessengerCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDebugUtilsMessengerCallbackDataEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT const &, const char * const &, int32_t const &, const char * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, pMessageIdName, messageIdNumber, pMessage, queueLabelCount, pQueueLabels, cmdBufLabelCount, pCmdBufLabels, objectCount, pObjects );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
std::strong_ordering operator<=>( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
|
|
if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
|
|
if ( pMessageIdName != rhs.pMessageIdName )
|
|
if ( auto cmp = strcmp( pMessageIdName, rhs.pMessageIdName ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
if ( auto cmp = messageIdNumber <=> rhs.messageIdNumber; cmp != 0 ) return cmp;
|
|
if ( pMessage != rhs.pMessage )
|
|
if ( auto cmp = strcmp( pMessage, rhs.pMessage ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
if ( auto cmp = queueLabelCount <=> rhs.queueLabelCount; cmp != 0 ) return cmp;
|
|
if ( auto cmp = pQueueLabels <=> rhs.pQueueLabels; cmp != 0 ) return cmp;
|
|
if ( auto cmp = cmdBufLabelCount <=> rhs.cmdBufLabelCount; cmp != 0 ) return cmp;
|
|
if ( auto cmp = pCmdBufLabels <=> rhs.pCmdBufLabels; cmp != 0 ) return cmp;
|
|
if ( auto cmp = objectCount <=> rhs.objectCount; cmp != 0 ) return cmp;
|
|
if ( auto cmp = pObjects <=> rhs.pObjects; cmp != 0 ) return cmp;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( ( pMessageIdName == rhs.pMessageIdName ) || ( strcmp( pMessageIdName, rhs.pMessageIdName ) == 0 ) )
|
|
&& ( messageIdNumber == rhs.messageIdNumber )
|
|
&& ( ( pMessage == rhs.pMessage ) || ( strcmp( pMessage, rhs.pMessage ) == 0 ) )
|
|
&& ( queueLabelCount == rhs.queueLabelCount )
|
|
&& ( pQueueLabels == rhs.pQueueLabels )
|
|
&& ( cmdBufLabelCount == rhs.cmdBufLabelCount )
|
|
&& ( pCmdBufLabels == rhs.pCmdBufLabels )
|
|
&& ( objectCount == rhs.objectCount )
|
|
&& ( pObjects == rhs.pObjects );
|
|
}
|
|
|
|
bool operator!=( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags = {};
|
|
const char * pMessageIdName = {};
|
|
int32_t messageIdNumber = {};
|
|
const char * pMessage = {};
|
|
uint32_t queueLabelCount = {};
|
|
const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels = {};
|
|
uint32_t cmdBufLabelCount = {};
|
|
const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels = {};
|
|
uint32_t objectCount = {};
|
|
const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT>::value, "DebugUtilsMessengerCallbackDataEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDebugUtilsMessengerCallbackDataEXT>
|
|
{
|
|
using Type = DebugUtilsMessengerCallbackDataEXT;
|
|
};
|
|
|
|
  struct DebugUtilsMessengerCreateInfoEXT
  {
    using NativeType = VkDebugUtilsMessengerCreateInfoEXT;

    static const bool allowDuplicate = true;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {}, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {}, PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = {}, void * pUserData_ = {}) VULKAN_HPP_NOEXCEPT
      : flags( flags_ ), messageSeverity( messageSeverity_ ), messageType( messageType_ ), pfnUserCallback( pfnUserCallback_ ), pUserData( pUserData_ )
    {}

    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DebugUtilsMessengerCreateInfoEXT( *reinterpret_cast<DebugUtilsMessengerCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    DebugUtilsMessengerCreateInfoEXT & operator=( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DebugUtilsMessengerCreateInfoEXT & operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setMessageSeverity( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) VULKAN_HPP_NOEXCEPT
    {
      messageSeverity = messageSeverity_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setMessageType( VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ ) VULKAN_HPP_NOEXCEPT
    {
      messageType = messageType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
    {
      pfnUserCallback = pfnUserCallback_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
    {
      pUserData = pUserData_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkDebugUtilsMessengerCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>( this );
    }

    explicit operator VkDebugUtilsMessengerCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDebugUtilsMessengerCreateInfoEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT const &, VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT const &, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT const &, PFN_vkDebugUtilsMessengerCallbackEXT const &, void * const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, messageSeverity, messageType, pfnUserCallback, pUserData );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( DebugUtilsMessengerCreateInfoEXT const & ) const = default;
#else
    bool operator==( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags )
          && ( messageSeverity == rhs.messageSeverity )
          && ( messageType == rhs.messageType )
          && ( pfnUserCallback == rhs.pfnUserCallback )
          && ( pUserData == rhs.pUserData );
#endif
    }

    bool operator!=( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags = {};
    VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity = {};
    VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType = {};
    PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback = {};
    void * pUserData = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT>::value, "DebugUtilsMessengerCreateInfoEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eDebugUtilsMessengerCreateInfoEXT>
  {
    using Type = DebugUtilsMessengerCreateInfoEXT;
  };

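  // Illustrative usage sketch (not part of the generated API): creating a debug
  // messenger. Assumes VK_EXT_debug_utils is enabled on the instance and that the
  // dispatcher in use has loaded vkCreateDebugUtilsMessengerEXT; the callback name
  // is hypothetical.
  //
  //   VKAPI_ATTR VkBool32 VKAPI_CALL myDebugCallback( VkDebugUtilsMessageSeverityFlagBitsEXT,
  //                                                   VkDebugUtilsMessageTypeFlagsEXT,
  //                                                   const VkDebugUtilsMessengerCallbackDataEXT * pCallbackData,
  //                                                   void * )
  //   {
  //     std::cerr << pCallbackData->pMessage << '\n';
  //     return VK_FALSE;  // the triggering call should not be aborted
  //   }
  //
  //   auto messengerInfo = vk::DebugUtilsMessengerCreateInfoEXT{}
  //                          .setMessageSeverity( vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning
  //                                               | vk::DebugUtilsMessageSeverityFlagBitsEXT::eError )
  //                          .setMessageType( vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation
  //                                           | vk::DebugUtilsMessageTypeFlagBitsEXT::ePerformance )
  //                          .setPfnUserCallback( &myDebugCallback );
  //   vk::DebugUtilsMessengerEXT messenger = instance.createDebugUtilsMessengerEXT( messengerInfo );
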
struct DebugUtilsObjectTagInfoEXT
|
|
{
|
|
using NativeType = VkDebugUtilsObjectTagInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectTagInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, uint64_t objectHandle_ = {}, uint64_t tagName_ = {}, size_t tagSize_ = {}, const void * pTag_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: objectType( objectType_ ), objectHandle( objectHandle_ ), tagName( tagName_ ), tagSize( tagSize_ ), pTag( pTag_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DebugUtilsObjectTagInfoEXT( *reinterpret_cast<DebugUtilsObjectTagInfoEXT const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
template <typename T>
|
|
DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle_, uint64_t tagName_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ )
|
|
: objectType( objectType_ ), objectHandle( objectHandle_ ), tagName( tagName_ ), tagSize( tag_.size() * sizeof(T) ), pTag( tag_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DebugUtilsObjectTagInfoEXT & operator=( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DebugUtilsObjectTagInfoEXT & operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
objectType = objectType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
objectHandle = objectHandle_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
tagName = tagName_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
tagSize = tagSize_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pTag = pTag_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
template <typename T>
|
|
DebugUtilsObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
tagSize = tag_.size() * sizeof(T);
|
|
pTag = tag_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDebugUtilsObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkDebugUtilsObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDebugUtilsObjectTagInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ObjectType const &, uint64_t const &, uint64_t const &, size_t const &, const void * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, objectType, objectHandle, tagName, tagSize, pTag );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DebugUtilsObjectTagInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( objectType == rhs.objectType )
|
|
&& ( objectHandle == rhs.objectHandle )
|
|
&& ( tagName == rhs.tagName )
|
|
&& ( tagSize == rhs.tagSize )
|
|
&& ( pTag == rhs.pTag );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
|
|
uint64_t objectHandle = {};
|
|
uint64_t tagName = {};
|
|
size_t tagSize = {};
|
|
const void * pTag = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT>::value, "DebugUtilsObjectTagInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDebugUtilsObjectTagInfoEXT>
|
|
{
|
|
using Type = DebugUtilsObjectTagInfoEXT;
|
|
};
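  // Illustrative usage sketch (not part of the generated API): attaching a small
  // binary tag to a handle through the ArrayProxy-based setTag helper (enhanced
  // mode). Assumes VK_EXT_debug_utils is enabled; "device", "pipeline" and the tag
  // value are placeholders.
  //
  //   std::array<uint32_t, 2> tagData = { 42u, 7u };
  //   device.setDebugUtilsObjectTagEXT(
  //     vk::DebugUtilsObjectTagInfoEXT{}
  //       .setObjectType( vk::ObjectType::ePipeline )
  //       .setObjectHandle( uint64_t( static_cast<VkPipeline>( pipeline ) ) )
  //       .setTagName( 0x1 )
  //       .setTag<uint32_t>( tagData ) );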
|
|
|
|
struct DedicatedAllocationBufferCreateInfoNV
|
|
{
|
|
using NativeType = VkDedicatedAllocationBufferCreateInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationBufferCreateInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: dedicatedAllocation( dedicatedAllocation_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DedicatedAllocationBufferCreateInfoNV( *reinterpret_cast<DedicatedAllocationBufferCreateInfoNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DedicatedAllocationBufferCreateInfoNV & operator=( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DedicatedAllocationBufferCreateInfoNV & operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV & setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dedicatedAllocation = dedicatedAllocation_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDedicatedAllocationBufferCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDedicatedAllocationBufferCreateInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkDedicatedAllocationBufferCreateInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDedicatedAllocationBufferCreateInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, dedicatedAllocation );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DedicatedAllocationBufferCreateInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( dedicatedAllocation == rhs.dedicatedAllocation );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV>::value, "DedicatedAllocationBufferCreateInfoNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDedicatedAllocationBufferCreateInfoNV>
|
|
{
|
|
using Type = DedicatedAllocationBufferCreateInfoNV;
|
|
};
|
|
|
|
struct DedicatedAllocationImageCreateInfoNV
|
|
{
|
|
using NativeType = VkDedicatedAllocationImageCreateInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationImageCreateInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: dedicatedAllocation( dedicatedAllocation_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DedicatedAllocationImageCreateInfoNV( *reinterpret_cast<DedicatedAllocationImageCreateInfoNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DedicatedAllocationImageCreateInfoNV & operator=( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DedicatedAllocationImageCreateInfoNV & operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV & setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dedicatedAllocation = dedicatedAllocation_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDedicatedAllocationImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDedicatedAllocationImageCreateInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkDedicatedAllocationImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDedicatedAllocationImageCreateInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, dedicatedAllocation );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DedicatedAllocationImageCreateInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( dedicatedAllocation == rhs.dedicatedAllocation );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV>::value, "DedicatedAllocationImageCreateInfoNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDedicatedAllocationImageCreateInfoNV>
|
|
{
|
|
using Type = DedicatedAllocationImageCreateInfoNV;
|
|
};
|
|
|
|
struct DedicatedAllocationMemoryAllocateInfoNV
|
|
{
|
|
using NativeType = VkDedicatedAllocationMemoryAllocateInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: image( image_ ), buffer( buffer_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DedicatedAllocationMemoryAllocateInfoNV( *reinterpret_cast<DedicatedAllocationMemoryAllocateInfoNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DedicatedAllocationMemoryAllocateInfoNV & operator=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DedicatedAllocationMemoryAllocateInfoNV & operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
image = image_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
buffer = buffer_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDedicatedAllocationMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkDedicatedAllocationMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDedicatedAllocationMemoryAllocateInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::Buffer const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, image, buffer );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DedicatedAllocationMemoryAllocateInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( image == rhs.image )
|
|
&& ( buffer == rhs.buffer );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Image image = {};
|
|
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV>::value, "DedicatedAllocationMemoryAllocateInfoNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDedicatedAllocationMemoryAllocateInfoNV>
|
|
{
|
|
using Type = DedicatedAllocationMemoryAllocateInfoNV;
|
|
};
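  // Illustrative usage sketch (not part of the generated API): the NV dedicated
  // allocation structures are chained through pNext. A minimal sketch, assuming
  // VK_NV_dedicated_allocation is enabled; "device" and the image/allocation
  // parameters are placeholders, and error handling is omitted.
  //
  //   vk::DedicatedAllocationImageCreateInfoNV dedicatedImageInfo{ VK_TRUE };
  //   vk::ImageCreateInfo imageInfo{};              // fill in format, extent, usage, ...
  //   imageInfo.setPNext( &dedicatedImageInfo );
  //   vk::Image image = device.createImage( imageInfo );
  //
  //   vk::DedicatedAllocationMemoryAllocateInfoNV dedicatedAllocInfo{ image, {} };
  //   vk::MemoryAllocateInfo allocInfo{};           // fill in allocationSize, memoryTypeIndex
  //   allocInfo.setPNext( &dedicatedAllocInfo );
  //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );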
|
|
|
|
struct MemoryBarrier2
|
|
{
|
|
using NativeType = VkMemoryBarrier2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR MemoryBarrier2(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: srcStageMask( srcStageMask_ ), srcAccessMask( srcAccessMask_ ), dstStageMask( dstStageMask_ ), dstAccessMask( dstAccessMask_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR MemoryBarrier2( MemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
MemoryBarrier2( VkMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: MemoryBarrier2( *reinterpret_cast<MemoryBarrier2 const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
MemoryBarrier2 & operator=( MemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
MemoryBarrier2 & operator=( VkMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrier2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcStageMask = srcStageMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcAccessMask = srcAccessMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstStageMask = dstStageMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstAccessMask = dstAccessMask_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkMemoryBarrier2*>( this );
|
|
}
|
|
|
|
explicit operator VkMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkMemoryBarrier2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, VULKAN_HPP_NAMESPACE::AccessFlags2 const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, VULKAN_HPP_NAMESPACE::AccessFlags2 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( MemoryBarrier2 const & ) const = default;
|
|
#else
|
|
bool operator==( MemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( srcStageMask == rhs.srcStageMask )
|
|
&& ( srcAccessMask == rhs.srcAccessMask )
|
|
&& ( dstStageMask == rhs.dstStageMask )
|
|
&& ( dstAccessMask == rhs.dstAccessMask );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( MemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {};
|
|
VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {};
|
|
VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryBarrier2 ) == sizeof( VkMemoryBarrier2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryBarrier2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryBarrier2>::value, "MemoryBarrier2 is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eMemoryBarrier2>
|
|
{
|
|
using Type = MemoryBarrier2;
|
|
};
|
|
using MemoryBarrier2KHR = MemoryBarrier2;
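  // Illustrative usage sketch (not part of the generated API): a global memory
  // barrier recorded with synchronization2. A minimal sketch, assuming a device
  // with the synchronization2 feature enabled and a command buffer in the
  // recording state.
  //
  //   vk::MemoryBarrier2 barrier = vk::MemoryBarrier2{}
  //                                  .setSrcStageMask( vk::PipelineStageFlagBits2::eTransfer )
  //                                  .setSrcAccessMask( vk::AccessFlagBits2::eTransferWrite )
  //                                  .setDstStageMask( vk::PipelineStageFlagBits2::eFragmentShader )
  //                                  .setDstAccessMask( vk::AccessFlagBits2::eShaderRead );
  //   commandBuffer.pipelineBarrier2( vk::DependencyInfo{}.setMemoryBarriers( barrier ) );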
|
|
|
|
struct ImageSubresourceRange
|
|
{
|
|
using NativeType = VkImageSubresourceRange;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageSubresourceRange(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t baseMipLevel_ = {}, uint32_t levelCount_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: aspectMask( aspectMask_ ), baseMipLevel( baseMipLevel_ ), levelCount( levelCount_ ), baseArrayLayer( baseArrayLayer_ ), layerCount( layerCount_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageSubresourceRange( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageSubresourceRange( *reinterpret_cast<ImageSubresourceRange const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageSubresourceRange & operator=( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageSubresourceRange & operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceRange const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
aspectMask = aspectMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setBaseMipLevel( uint32_t baseMipLevel_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
baseMipLevel = baseMipLevel_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
levelCount = levelCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
baseArrayLayer = baseArrayLayer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
layerCount = layerCount_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImageSubresourceRange const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageSubresourceRange*>( this );
|
|
}
|
|
|
|
explicit operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageSubresourceRange*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( aspectMask, baseMipLevel, levelCount, baseArrayLayer, layerCount );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageSubresourceRange const & ) const = default;
|
|
#else
|
|
bool operator==( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( aspectMask == rhs.aspectMask )
|
|
&& ( baseMipLevel == rhs.baseMipLevel )
|
|
&& ( levelCount == rhs.levelCount )
|
|
&& ( baseArrayLayer == rhs.baseArrayLayer )
|
|
&& ( layerCount == rhs.layerCount );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
|
|
uint32_t baseMipLevel = {};
|
|
uint32_t levelCount = {};
|
|
uint32_t baseArrayLayer = {};
|
|
uint32_t layerCount = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresourceRange>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresourceRange>::value, "ImageSubresourceRange is not nothrow_move_constructible!" );
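
  // Usage sketch (illustrative only): selecting every mip level and array layer of a color image,
  // assuming the default "vk" namespace and the standard VK_REMAINING_* constants from vulkan_core.h:
  //
  //   vk::ImageSubresourceRange fullRange = vk::ImageSubresourceRange{}
  //                                           .setAspectMask( vk::ImageAspectFlagBits::eColor )
  //                                           .setBaseMipLevel( 0 )
  //                                           .setLevelCount( VK_REMAINING_MIP_LEVELS )
  //                                           .setBaseArrayLayer( 0 )
  //                                           .setLayerCount( VK_REMAINING_ARRAY_LAYERS );
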
struct ImageMemoryBarrier2
|
|
{
|
|
using NativeType = VkImageMemoryBarrier2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: srcStageMask( srcStageMask_ ), srcAccessMask( srcAccessMask_ ), dstStageMask( dstStageMask_ ), dstAccessMask( dstAccessMask_ ), oldLayout( oldLayout_ ), newLayout( newLayout_ ), srcQueueFamilyIndex( srcQueueFamilyIndex_ ), dstQueueFamilyIndex( dstQueueFamilyIndex_ ), image( image_ ), subresourceRange( subresourceRange_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2( ImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageMemoryBarrier2( VkImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageMemoryBarrier2( *reinterpret_cast<ImageMemoryBarrier2 const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageMemoryBarrier2 & operator=( ImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageMemoryBarrier2 & operator=( VkImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcStageMask = srcStageMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcAccessMask = srcAccessMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstStageMask = dstStageMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstAccessMask = dstAccessMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
oldLayout = oldLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
newLayout = newLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcQueueFamilyIndex = srcQueueFamilyIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstQueueFamilyIndex = dstQueueFamilyIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
image = image_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subresourceRange = subresourceRange_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImageMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageMemoryBarrier2*>( this );
|
|
}
|
|
|
|
explicit operator VkImageMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageMemoryBarrier2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, VULKAN_HPP_NAMESPACE::AccessFlags2 const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, VULKAN_HPP_NAMESPACE::AccessFlags2 const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageSubresourceRange const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask, oldLayout, newLayout, srcQueueFamilyIndex, dstQueueFamilyIndex, image, subresourceRange );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageMemoryBarrier2 const & ) const = default;
|
|
#else
|
|
bool operator==( ImageMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( srcStageMask == rhs.srcStageMask )
|
|
&& ( srcAccessMask == rhs.srcAccessMask )
|
|
&& ( dstStageMask == rhs.dstStageMask )
|
|
&& ( dstAccessMask == rhs.dstAccessMask )
|
|
&& ( oldLayout == rhs.oldLayout )
|
|
&& ( newLayout == rhs.newLayout )
|
|
&& ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
|
|
&& ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
|
|
&& ( image == rhs.image )
|
|
&& ( subresourceRange == rhs.subresourceRange );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {};
|
|
VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {};
|
|
VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
uint32_t srcQueueFamilyIndex = {};
|
|
uint32_t dstQueueFamilyIndex = {};
|
|
VULKAN_HPP_NAMESPACE::Image image = {};
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 ) == sizeof( VkImageMemoryBarrier2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2>::value, "ImageMemoryBarrier2 is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageMemoryBarrier2>
|
|
{
|
|
using Type = ImageMemoryBarrier2;
|
|
};
|
|
using ImageMemoryBarrier2KHR = ImageMemoryBarrier2;
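
  // Usage sketch (illustrative only): a typical layout transition from eUndefined to
  // eTransferDstOptimal before a copy, assuming the default "vk" namespace, an existing
  // vk::Image "image", and the "fullRange" subresource range sketched above:
  //
  //   vk::ImageMemoryBarrier2 toTransferDst = vk::ImageMemoryBarrier2{}
  //                                             .setSrcStageMask( vk::PipelineStageFlagBits2::eTopOfPipe )
  //                                             .setDstStageMask( vk::PipelineStageFlagBits2::eTransfer )
  //                                             .setDstAccessMask( vk::AccessFlagBits2::eTransferWrite )
  //                                             .setOldLayout( vk::ImageLayout::eUndefined )
  //                                             .setNewLayout( vk::ImageLayout::eTransferDstOptimal )
  //                                             .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //                                             .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //                                             .setImage( image )
  //                                             .setSubresourceRange( fullRange );
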
struct DependencyInfo
|
|
{
|
|
using NativeType = VkDependencyInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDependencyInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DependencyInfo(VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, uint32_t memoryBarrierCount_ = {}, const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers_ = {}, uint32_t bufferMemoryBarrierCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers_ = {}, uint32_t imageMemoryBarrierCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: dependencyFlags( dependencyFlags_ ), memoryBarrierCount( memoryBarrierCount_ ), pMemoryBarriers( pMemoryBarriers_ ), bufferMemoryBarrierCount( bufferMemoryBarrierCount_ ), pBufferMemoryBarriers( pBufferMemoryBarriers_ ), imageMemoryBarrierCount( imageMemoryBarrierCount_ ), pImageMemoryBarriers( pImageMemoryBarriers_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DependencyInfo( DependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DependencyInfo( VkDependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DependencyInfo( *reinterpret_cast<DependencyInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DependencyInfo( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MemoryBarrier2> const & memoryBarriers_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2> const & bufferMemoryBarriers_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2> const & imageMemoryBarriers_ = {} )
|
|
: dependencyFlags( dependencyFlags_ ), memoryBarrierCount( static_cast<uint32_t>( memoryBarriers_.size() ) ), pMemoryBarriers( memoryBarriers_.data() ), bufferMemoryBarrierCount( static_cast<uint32_t>( bufferMemoryBarriers_.size() ) ), pBufferMemoryBarriers( bufferMemoryBarriers_.data() ), imageMemoryBarrierCount( static_cast<uint32_t>( imageMemoryBarriers_.size() ) ), pImageMemoryBarriers( imageMemoryBarriers_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DependencyInfo & operator=( DependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DependencyInfo & operator=( VkDependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DependencyInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dependencyFlags = dependencyFlags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setMemoryBarrierCount( uint32_t memoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memoryBarrierCount = memoryBarrierCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPMemoryBarriers( const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pMemoryBarriers = pMemoryBarriers_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DependencyInfo & setMemoryBarriers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MemoryBarrier2> const & memoryBarriers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memoryBarrierCount = static_cast<uint32_t>( memoryBarriers_.size() );
|
|
pMemoryBarriers = memoryBarriers_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setBufferMemoryBarrierCount( uint32_t bufferMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferMemoryBarrierCount = bufferMemoryBarrierCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPBufferMemoryBarriers( const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pBufferMemoryBarriers = pBufferMemoryBarriers_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DependencyInfo & setBufferMemoryBarriers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2> const & bufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferMemoryBarrierCount = static_cast<uint32_t>( bufferMemoryBarriers_.size() );
|
|
pBufferMemoryBarriers = bufferMemoryBarriers_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setImageMemoryBarrierCount( uint32_t imageMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageMemoryBarrierCount = imageMemoryBarrierCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPImageMemoryBarriers( const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pImageMemoryBarriers = pImageMemoryBarriers_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DependencyInfo & setImageMemoryBarriers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2> const & imageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageMemoryBarrierCount = static_cast<uint32_t>( imageMemoryBarriers_.size() );
|
|
pImageMemoryBarriers = imageMemoryBarriers_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDependencyInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDependencyInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkDependencyInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDependencyInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DependencyFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DependencyInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DependencyInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( dependencyFlags == rhs.dependencyFlags )
|
|
&& ( memoryBarrierCount == rhs.memoryBarrierCount )
|
|
&& ( pMemoryBarriers == rhs.pMemoryBarriers )
|
|
&& ( bufferMemoryBarrierCount == rhs.bufferMemoryBarrierCount )
|
|
&& ( pBufferMemoryBarriers == rhs.pBufferMemoryBarriers )
|
|
&& ( imageMemoryBarrierCount == rhs.imageMemoryBarrierCount )
|
|
&& ( pImageMemoryBarriers == rhs.pImageMemoryBarriers );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DependencyInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDependencyInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
|
|
uint32_t memoryBarrierCount = {};
|
|
const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers = {};
|
|
uint32_t bufferMemoryBarrierCount = {};
|
|
const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers = {};
|
|
uint32_t imageMemoryBarrierCount = {};
|
|
const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DependencyInfo ) == sizeof( VkDependencyInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DependencyInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DependencyInfo>::value, "DependencyInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDependencyInfo>
|
|
{
|
|
using Type = DependencyInfo;
|
|
};
|
|
using DependencyInfoKHR = DependencyInfo;
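
  // Usage sketch (illustrative only): recording the image barrier sketched above through a
  // DependencyInfo, assuming the default "vk" namespace, enhanced mode, and a vk::CommandBuffer
  // "cmd" recorded on a device with synchronization2 / Vulkan 1.3 enabled. The ArrayProxy
  // constructor fills the count/pointer pairs from the supplied ranges:
  //
  //   vk::DependencyInfo depInfo( {},               // dependencyFlags
  //                               {},               // memory barriers
  //                               {},               // buffer memory barriers
  //                               toTransferDst );  // image memory barriers
  //   cmd.pipelineBarrier2( depInfo );
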
struct DescriptorBufferInfo
|
|
{
|
|
using NativeType = VkDescriptorBufferInfo;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DescriptorBufferInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize range_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: buffer( buffer_ ), offset( offset_ ), range( range_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DescriptorBufferInfo( *reinterpret_cast<DescriptorBufferInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DescriptorBufferInfo & operator=( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorBufferInfo & operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
buffer = buffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
offset = offset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
range = range_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDescriptorBufferInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDescriptorBufferInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkDescriptorBufferInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDescriptorBufferInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( buffer, offset, range );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DescriptorBufferInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( buffer == rhs.buffer )
|
|
&& ( offset == rhs.offset )
|
|
&& ( range == rhs.range );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize range = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo>::value, "DescriptorBufferInfo is not nothrow_move_constructible!" );
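
  // Usage sketch (illustrative only): describing a whole uniform buffer for a descriptor write,
  // assuming the default "vk" namespace, an existing vk::Buffer "uniformBuffer", and the
  // VK_WHOLE_SIZE constant from vulkan_core.h:
  //
  //   vk::DescriptorBufferInfo bufferInfo = vk::DescriptorBufferInfo{}
  //                                           .setBuffer( uniformBuffer )
  //                                           .setOffset( 0 )
  //                                           .setRange( VK_WHOLE_SIZE );
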
struct DescriptorImageInfo
|
|
{
|
|
using NativeType = VkDescriptorImageInfo;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DescriptorImageInfo(VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
|
|
: sampler( sampler_ ), imageView( imageView_ ), imageLayout( imageLayout_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DescriptorImageInfo( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DescriptorImageInfo( *reinterpret_cast<DescriptorImageInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DescriptorImageInfo & operator=( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorImageInfo & operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorImageInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sampler = sampler_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageView = imageView_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageLayout = imageLayout_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDescriptorImageInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDescriptorImageInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkDescriptorImageInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDescriptorImageInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::Sampler const &, VULKAN_HPP_NAMESPACE::ImageView const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sampler, imageView, imageLayout );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DescriptorImageInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sampler == rhs.sampler )
|
|
&& ( imageView == rhs.imageView )
|
|
&& ( imageLayout == rhs.imageLayout );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::Sampler sampler = {};
|
|
VULKAN_HPP_NAMESPACE::ImageView imageView = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorImageInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorImageInfo>::value, "DescriptorImageInfo is not nothrow_move_constructible!" );
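
  // Usage sketch (illustrative only): a combined image sampler descriptor, assuming the default
  // "vk" namespace and existing vk::Sampler / vk::ImageView handles "sampler" and "imageView":
  //
  //   vk::DescriptorImageInfo imageInfo( sampler, imageView, vk::ImageLayout::eShaderReadOnlyOptimal );
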
struct DescriptorPoolSize
|
|
{
|
|
using NativeType = VkDescriptorPoolSize;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DescriptorPoolSize(VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, uint32_t descriptorCount_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: type( type_ ), descriptorCount( descriptorCount_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DescriptorPoolSize( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DescriptorPoolSize( *reinterpret_cast<DescriptorPoolSize const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DescriptorPoolSize & operator=( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorPoolSize & operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolSize const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize & setType( VULKAN_HPP_NAMESPACE::DescriptorType type_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
type = type_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorCount = descriptorCount_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDescriptorPoolSize const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDescriptorPoolSize*>( this );
|
|
}
|
|
|
|
explicit operator VkDescriptorPoolSize &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDescriptorPoolSize*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::DescriptorType const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( type, descriptorCount );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DescriptorPoolSize const & ) const = default;
|
|
#else
|
|
bool operator==( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( type == rhs.type )
|
|
&& ( descriptorCount == rhs.descriptorCount );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::DescriptorType type = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
|
|
uint32_t descriptorCount = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorPoolSize>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorPoolSize>::value, "DescriptorPoolSize is not nothrow_move_constructible!" );
|
|
|
|
struct DescriptorPoolCreateInfo
|
|
{
|
|
using NativeType = VkDescriptorPoolCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = {}, uint32_t maxSets_ = {}, uint32_t poolSizeCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), maxSets( maxSets_ ), poolSizeCount( poolSizeCount_ ), pPoolSizes( pPoolSizes_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DescriptorPoolCreateInfo( *reinterpret_cast<DescriptorPoolCreateInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_, uint32_t maxSets_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorPoolSize> const & poolSizes_ )
|
|
: flags( flags_ ), maxSets( maxSets_ ), poolSizeCount( static_cast<uint32_t>( poolSizes_.size() ) ), pPoolSizes( poolSizes_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DescriptorPoolCreateInfo & operator=( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorPoolCreateInfo & operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setMaxSets( uint32_t maxSets_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxSets = maxSets_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPoolSizeCount( uint32_t poolSizeCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
poolSizeCount = poolSizeCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPPoolSizes( const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pPoolSizes = pPoolSizes_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DescriptorPoolCreateInfo & setPoolSizes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorPoolSize> const & poolSizes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
poolSizeCount = static_cast<uint32_t>( poolSizes_.size() );
|
|
pPoolSizes = poolSizes_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDescriptorPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDescriptorPoolCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkDescriptorPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDescriptorPoolCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, maxSets, poolSizeCount, pPoolSizes );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DescriptorPoolCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( maxSets == rhs.maxSets )
|
|
&& ( poolSizeCount == rhs.poolSizeCount )
|
|
&& ( pPoolSizes == rhs.pPoolSizes );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags = {};
|
|
uint32_t maxSets = {};
|
|
uint32_t poolSizeCount = {};
|
|
const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo>::value, "DescriptorPoolCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDescriptorPoolCreateInfo>
|
|
{
|
|
using Type = DescriptorPoolCreateInfo;
|
|
};
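
  // Usage sketch (illustrative only): creating a small pool via the enhanced-mode ArrayProxy
  // constructor, assuming the default "vk" namespace and an existing vk::Device "device":
  //
  //   std::array<vk::DescriptorPoolSize, 2> poolSizes = {
  //     vk::DescriptorPoolSize( vk::DescriptorType::eUniformBuffer, 16 ),
  //     vk::DescriptorPoolSize( vk::DescriptorType::eCombinedImageSampler, 16 )
  //   };
  //   vk::DescriptorPoolCreateInfo poolInfo( vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet,
  //                                          32,            // maxSets
  //                                          poolSizes );   // poolSizeCount / pPoolSizes
  //   vk::DescriptorPool pool = device.createDescriptorPool( poolInfo );
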
struct DescriptorPoolInlineUniformBlockCreateInfo
|
|
{
|
|
using NativeType = VkDescriptorPoolInlineUniformBlockCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfo(uint32_t maxInlineUniformBlockBindings_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfo( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorPoolInlineUniformBlockCreateInfo( VkDescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DescriptorPoolInlineUniformBlockCreateInfo( *reinterpret_cast<DescriptorPoolInlineUniformBlockCreateInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DescriptorPoolInlineUniformBlockCreateInfo & operator=( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorPoolInlineUniformBlockCreateInfo & operator=( VkDescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo & setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxInlineUniformBlockBindings = maxInlineUniformBlockBindings_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDescriptorPoolInlineUniformBlockCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDescriptorPoolInlineUniformBlockCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkDescriptorPoolInlineUniformBlockCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDescriptorPoolInlineUniformBlockCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maxInlineUniformBlockBindings );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DescriptorPoolInlineUniformBlockCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfo;
|
|
const void * pNext = {};
|
|
uint32_t maxInlineUniformBlockBindings = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo ) == sizeof( VkDescriptorPoolInlineUniformBlockCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo>::value, "DescriptorPoolInlineUniformBlockCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDescriptorPoolInlineUniformBlockCreateInfo>
|
|
{
|
|
using Type = DescriptorPoolInlineUniformBlockCreateInfo;
|
|
};
|
|
using DescriptorPoolInlineUniformBlockCreateInfoEXT = DescriptorPoolInlineUniformBlockCreateInfo;
|
|
|
|
struct DescriptorSetAllocateInfo
|
|
{
|
|
using NativeType = VkDescriptorSetAllocateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetAllocateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = {}, uint32_t descriptorSetCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: descriptorPool( descriptorPool_ ), descriptorSetCount( descriptorSetCount_ ), pSetLayouts( pSetLayouts_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DescriptorSetAllocateInfo( *reinterpret_cast<DescriptorSetAllocateInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DescriptorSetAllocateInfo( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ )
|
|
: descriptorPool( descriptorPool_ ), descriptorSetCount( static_cast<uint32_t>( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DescriptorSetAllocateInfo & operator=( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorSetAllocateInfo & operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorPool = descriptorPool_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorSetCount = descriptorSetCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSetLayouts = pSetLayouts_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DescriptorSetAllocateInfo & setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorSetCount = static_cast<uint32_t>( setLayouts_.size() );
|
|
pSetLayouts = setLayouts_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDescriptorSetAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDescriptorSetAllocateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkDescriptorSetAllocateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDescriptorSetAllocateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DescriptorPool const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, descriptorPool, descriptorSetCount, pSetLayouts );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DescriptorSetAllocateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( descriptorPool == rhs.descriptorPool )
|
|
&& ( descriptorSetCount == rhs.descriptorSetCount )
|
|
&& ( pSetLayouts == rhs.pSetLayouts );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetAllocateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool = {};
|
|
uint32_t descriptorSetCount = {};
|
|
const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo>::value, "DescriptorSetAllocateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDescriptorSetAllocateInfo>
|
|
{
|
|
using Type = DescriptorSetAllocateInfo;
|
|
};
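
  // Usage sketch (illustrative only): allocating one set per layout from the "pool" sketched above,
  // assuming the default "vk" namespace, enhanced mode, and an existing vk::DescriptorSetLayout
  // "layout":
  //
  //   std::array<vk::DescriptorSetLayout, 2> layouts = { layout, layout };
  //   vk::DescriptorSetAllocateInfo allocInfo( pool, layouts );   // descriptorSetCount = layouts.size()
  //   std::vector<vk::DescriptorSet> sets = device.allocateDescriptorSets( allocInfo );
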
struct DescriptorSetBindingReferenceVALVE
|
|
{
|
|
using NativeType = VkDescriptorSetBindingReferenceVALVE;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetBindingReferenceVALVE;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetBindingReferenceVALVE(VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, uint32_t binding_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: descriptorSetLayout( descriptorSetLayout_ ), binding( binding_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetBindingReferenceVALVE( DescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorSetBindingReferenceVALVE( VkDescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DescriptorSetBindingReferenceVALVE( *reinterpret_cast<DescriptorSetBindingReferenceVALVE const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DescriptorSetBindingReferenceVALVE & operator=( DescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorSetBindingReferenceVALVE & operator=( VkDescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorSetLayout = descriptorSetLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
binding = binding_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDescriptorSetBindingReferenceVALVE const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE*>( this );
|
|
}
|
|
|
|
explicit operator VkDescriptorSetBindingReferenceVALVE &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDescriptorSetBindingReferenceVALVE*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DescriptorSetLayout const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, descriptorSetLayout, binding );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DescriptorSetBindingReferenceVALVE const & ) const = default;
|
|
#else
|
|
bool operator==( DescriptorSetBindingReferenceVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( descriptorSetLayout == rhs.descriptorSetLayout )
|
|
&& ( binding == rhs.binding );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DescriptorSetBindingReferenceVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetBindingReferenceVALVE;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {};
|
|
uint32_t binding = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE ) == sizeof( VkDescriptorSetBindingReferenceVALVE ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE>::value, "DescriptorSetBindingReferenceVALVE is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDescriptorSetBindingReferenceVALVE>
|
|
{
|
|
using Type = DescriptorSetBindingReferenceVALVE;
|
|
};
|
|
|
|
struct DescriptorSetLayoutBinding
|
|
{
|
|
using NativeType = VkDescriptorSetLayoutBinding;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding(uint32_t binding_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: binding( binding_ ), descriptorType( descriptorType_ ), descriptorCount( descriptorCount_ ), stageFlags( stageFlags_ ), pImmutableSamplers( pImmutableSamplers_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DescriptorSetLayoutBinding( *reinterpret_cast<DescriptorSetLayoutBinding const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DescriptorSetLayoutBinding( uint32_t binding_, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Sampler> const & immutableSamplers_ )
|
|
: binding( binding_ ), descriptorType( descriptorType_ ), descriptorCount( static_cast<uint32_t>( immutableSamplers_.size() ) ), stageFlags( stageFlags_ ), pImmutableSamplers( immutableSamplers_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DescriptorSetLayoutBinding & operator=( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorSetLayoutBinding & operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
binding = binding_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorType = descriptorType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorCount = descriptorCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stageFlags = stageFlags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setPImmutableSamplers( const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pImmutableSamplers = pImmutableSamplers_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DescriptorSetLayoutBinding & setImmutableSamplers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Sampler> const & immutableSamplers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorCount = static_cast<uint32_t>( immutableSamplers_.size() );
|
|
pImmutableSamplers = immutableSamplers_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDescriptorSetLayoutBinding const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDescriptorSetLayoutBinding*>( this );
|
|
}
|
|
|
|
explicit operator VkDescriptorSetLayoutBinding &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDescriptorSetLayoutBinding*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::DescriptorType const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, const VULKAN_HPP_NAMESPACE::Sampler * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( binding, descriptorType, descriptorCount, stageFlags, pImmutableSamplers );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DescriptorSetLayoutBinding const & ) const = default;
|
|
#else
|
|
bool operator==( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( binding == rhs.binding )
|
|
&& ( descriptorType == rhs.descriptorType )
|
|
&& ( descriptorCount == rhs.descriptorCount )
|
|
&& ( stageFlags == rhs.stageFlags )
|
|
&& ( pImmutableSamplers == rhs.pImmutableSamplers );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t binding = {};
|
|
VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
|
|
uint32_t descriptorCount = {};
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
|
|
const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding>::value, "DescriptorSetLayoutBinding is not nothrow_move_constructible!" );
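
  // Usage sketch (illustrative only): binding 0 as a single uniform buffer visible to the vertex
  // and fragment stages, assuming the default "vk" namespace (pImmutableSamplers stays nullptr):
  //
  //   vk::DescriptorSetLayoutBinding uboBinding( 0,                                    // binding
  //                                              vk::DescriptorType::eUniformBuffer,
  //                                              1,                                    // descriptorCount
  //                                              vk::ShaderStageFlagBits::eVertex | vk::ShaderStageFlagBits::eFragment );
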
struct DescriptorSetLayoutBindingFlagsCreateInfo
|
|
{
|
|
using NativeType = VkDescriptorSetLayoutBindingFlagsCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo(uint32_t bindingCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: bindingCount( bindingCount_ ), pBindingFlags( pBindingFlags_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorSetLayoutBindingFlagsCreateInfo( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DescriptorSetLayoutBindingFlagsCreateInfo( *reinterpret_cast<DescriptorSetLayoutBindingFlagsCreateInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DescriptorSetLayoutBindingFlagsCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags> const & bindingFlags_ )
|
|
: bindingCount( static_cast<uint32_t>( bindingFlags_.size() ) ), pBindingFlags( bindingFlags_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DescriptorSetLayoutBindingFlagsCreateInfo & operator=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorSetLayoutBindingFlagsCreateInfo & operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bindingCount = bindingCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setPBindingFlags( const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pBindingFlags = pBindingFlags_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DescriptorSetLayoutBindingFlagsCreateInfo & setBindingFlags( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags> const & bindingFlags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bindingCount = static_cast<uint32_t>( bindingFlags_.size() );
|
|
pBindingFlags = bindingFlags_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDescriptorSetLayoutBindingFlagsCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDescriptorSetLayoutBindingFlagsCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkDescriptorSetLayoutBindingFlagsCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDescriptorSetLayoutBindingFlagsCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, bindingCount, pBindingFlags );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DescriptorSetLayoutBindingFlagsCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( bindingCount == rhs.bindingCount )
|
|
&& ( pBindingFlags == rhs.pBindingFlags );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo;
|
|
const void * pNext = {};
|
|
uint32_t bindingCount = {};
|
|
const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo ) == sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo>::value, "DescriptorSetLayoutBindingFlagsCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo>
|
|
{
|
|
using Type = DescriptorSetLayoutBindingFlagsCreateInfo;
|
|
};
|
|
using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo;
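
  // Usage sketch (illustrative comment only; assumes the default "vk" namespace and enhanced mode):
  // per-binding flags are supplied by chaining this structure into a DescriptorSetLayoutCreateInfo
  // via pNext; the flag values below are examples, not requirements, and "bindings" is assumed to
  // be an array of DescriptorSetLayoutBinding set up elsewhere.
  //
  //   std::array<vk::DescriptorBindingFlags, 2> bindingFlags = {
  //     vk::DescriptorBindingFlags{},
  //     vk::DescriptorBindingFlags{ vk::DescriptorBindingFlagBits::ePartiallyBound |
  //                                 vk::DescriptorBindingFlagBits::eVariableDescriptorCount } };
  //   vk::DescriptorSetLayoutBindingFlagsCreateInfo flagsInfo( bindingFlags );
  //   vk::DescriptorSetLayoutCreateInfo layoutInfo( {}, bindings );
  //   layoutInfo.setPNext( &flagsInfo );
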
struct DescriptorSetLayoutCreateInfo
|
|
{
|
|
using NativeType = VkDescriptorSetLayoutCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = {}, uint32_t bindingCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), bindingCount( bindingCount_ ), pBindings( pBindings_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DescriptorSetLayoutCreateInfo( *reinterpret_cast<DescriptorSetLayoutCreateInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding> const & bindings_ )
|
|
: flags( flags_ ), bindingCount( static_cast<uint32_t>( bindings_.size() ) ), pBindings( bindings_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DescriptorSetLayoutCreateInfo & operator=( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorSetLayoutCreateInfo & operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bindingCount = bindingCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setPBindings( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pBindings = pBindings_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DescriptorSetLayoutCreateInfo & setBindings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding> const & bindings_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bindingCount = static_cast<uint32_t>( bindings_.size() );
|
|
pBindings = bindings_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDescriptorSetLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkDescriptorSetLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDescriptorSetLayoutCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, bindingCount, pBindings );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DescriptorSetLayoutCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( bindingCount == rhs.bindingCount )
|
|
&& ( pBindings == rhs.pBindings );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags = {};
|
|
uint32_t bindingCount = {};
|
|
const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo>::value, "DescriptorSetLayoutCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDescriptorSetLayoutCreateInfo>
|
|
{
|
|
using Type = DescriptorSetLayoutCreateInfo;
|
|
};
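
  // Usage sketch (illustrative comment only; assumes a valid vk::Device named "device", the default
  // "vk" namespace, enhanced mode, and the default exception-based error handling):
  //
  //   std::array<vk::DescriptorSetLayoutBinding, 2> bindings = {
  //     vk::DescriptorSetLayoutBinding( 0, vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex ),
  //     vk::DescriptorSetLayoutBinding( 1, vk::DescriptorType::eCombinedImageSampler, 1, vk::ShaderStageFlagBits::eFragment ) };
  //   vk::DescriptorSetLayoutCreateInfo layoutInfo( {}, bindings );   // ArrayProxy constructor fills bindingCount / pBindings
  //   vk::DescriptorSetLayout setLayout = device.createDescriptorSetLayout( layoutInfo );
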
struct DescriptorSetLayoutHostMappingInfoVALVE
|
|
{
|
|
using NativeType = VkDescriptorSetLayoutHostMappingInfoVALVE;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutHostMappingInfoVALVE;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutHostMappingInfoVALVE(size_t descriptorOffset_ = {}, uint32_t descriptorSize_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: descriptorOffset( descriptorOffset_ ), descriptorSize( descriptorSize_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutHostMappingInfoVALVE( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorSetLayoutHostMappingInfoVALVE( VkDescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DescriptorSetLayoutHostMappingInfoVALVE( *reinterpret_cast<DescriptorSetLayoutHostMappingInfoVALVE const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DescriptorSetLayoutHostMappingInfoVALVE & operator=( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorSetLayoutHostMappingInfoVALVE & operator=( VkDescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setDescriptorOffset( size_t descriptorOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorOffset = descriptorOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setDescriptorSize( uint32_t descriptorSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorSize = descriptorSize_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDescriptorSetLayoutHostMappingInfoVALVE const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDescriptorSetLayoutHostMappingInfoVALVE*>( this );
|
|
}
|
|
|
|
explicit operator VkDescriptorSetLayoutHostMappingInfoVALVE &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, size_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, descriptorOffset, descriptorSize );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DescriptorSetLayoutHostMappingInfoVALVE const & ) const = default;
|
|
#else
|
|
bool operator==( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( descriptorOffset == rhs.descriptorOffset )
|
|
&& ( descriptorSize == rhs.descriptorSize );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutHostMappingInfoVALVE;
|
|
void * pNext = {};
|
|
size_t descriptorOffset = {};
|
|
uint32_t descriptorSize = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE ) == sizeof( VkDescriptorSetLayoutHostMappingInfoVALVE ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE>::value, "DescriptorSetLayoutHostMappingInfoVALVE is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDescriptorSetLayoutHostMappingInfoVALVE>
|
|
{
|
|
using Type = DescriptorSetLayoutHostMappingInfoVALVE;
|
|
};
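
  // Usage sketch (illustrative comment only; assumes the VK_VALVE_descriptor_set_host_mapping
  // extension is enabled on "device", a vk::DescriptorSetLayout "setLayout", and that the
  // enhanced-mode query is available in this configuration):
  //
  //   vk::DescriptorSetBindingReferenceVALVE bindingReference( setLayout, 0 /* binding */ );
  //   vk::DescriptorSetLayoutHostMappingInfoVALVE hostMapping =
  //     device.getDescriptorSetLayoutHostMappingInfoVALVE( bindingReference );
  //   // hostMapping.descriptorOffset and hostMapping.descriptorSize then describe where the
  //   // descriptor for binding 0 lives inside the host-mapped descriptor set memory.
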
struct DescriptorSetLayoutSupport
|
|
{
|
|
using NativeType = VkDescriptorSetLayoutSupport;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutSupport;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport(VULKAN_HPP_NAMESPACE::Bool32 supported_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: supported( supported_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorSetLayoutSupport( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DescriptorSetLayoutSupport( *reinterpret_cast<DescriptorSetLayoutSupport const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DescriptorSetLayoutSupport & operator=( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorSetLayoutSupport & operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkDescriptorSetLayoutSupport const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDescriptorSetLayoutSupport*>( this );
|
|
}
|
|
|
|
explicit operator VkDescriptorSetLayoutSupport &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDescriptorSetLayoutSupport*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, supported );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DescriptorSetLayoutSupport const & ) const = default;
|
|
#else
|
|
bool operator==( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( supported == rhs.supported );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutSupport;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 supported = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>::value, "DescriptorSetLayoutSupport is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDescriptorSetLayoutSupport>
|
|
{
|
|
using Type = DescriptorSetLayoutSupport;
|
|
};
|
|
using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport;
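
  // Usage sketch (illustrative comment only; assumes a valid vk::Device named "device"): this
  // structure is an output of vkGetDescriptorSetLayoutSupport, so it is normally obtained from the
  // device query rather than filled in by the application.
  //
  //   vk::DescriptorSetLayoutSupport support = device.getDescriptorSetLayoutSupport( layoutInfo );
  //   if ( !support.supported )
  //   {
  //     // fall back to a smaller / simpler descriptor set layout
  //   }
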
struct DescriptorSetVariableDescriptorCountAllocateInfo
|
|
{
|
|
using NativeType = VkDescriptorSetVariableDescriptorCountAllocateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo(uint32_t descriptorSetCount_ = {}, const uint32_t * pDescriptorCounts_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: descriptorSetCount( descriptorSetCount_ ), pDescriptorCounts( pDescriptorCounts_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorSetVariableDescriptorCountAllocateInfo( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DescriptorSetVariableDescriptorCountAllocateInfo( *reinterpret_cast<DescriptorSetVariableDescriptorCountAllocateInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DescriptorSetVariableDescriptorCountAllocateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & descriptorCounts_ )
|
|
: descriptorSetCount( static_cast<uint32_t>( descriptorCounts_.size() ) ), pDescriptorCounts( descriptorCounts_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DescriptorSetVariableDescriptorCountAllocateInfo & operator=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorSetVariableDescriptorCountAllocateInfo & operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorSetCount = descriptorSetCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setPDescriptorCounts( const uint32_t * pDescriptorCounts_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDescriptorCounts = pDescriptorCounts_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & descriptorCounts_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorSetCount = static_cast<uint32_t>( descriptorCounts_.size() );
|
|
pDescriptorCounts = descriptorCounts_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDescriptorSetVariableDescriptorCountAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountAllocateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkDescriptorSetVariableDescriptorCountAllocateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountAllocateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, descriptorSetCount, pDescriptorCounts );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DescriptorSetVariableDescriptorCountAllocateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( descriptorSetCount == rhs.descriptorSetCount )
|
|
&& ( pDescriptorCounts == rhs.pDescriptorCounts );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo;
|
|
const void * pNext = {};
|
|
uint32_t descriptorSetCount = {};
|
|
const uint32_t * pDescriptorCounts = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo ) == sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo>::value, "DescriptorSetVariableDescriptorCountAllocateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo>
|
|
{
|
|
using Type = DescriptorSetVariableDescriptorCountAllocateInfo;
|
|
};
|
|
using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo;
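
  // Usage sketch (illustrative comment only; assumes the descriptorBindingVariableDescriptorCount
  // feature is enabled, plus a valid vk::Device "device", vk::DescriptorPool "pool" and
  // vk::DescriptorSetLayout "setLayout" whose last binding was declared variable-sized):
  //
  //   uint32_t variableCounts[] = { 16 };   // actual size requested for the variable-count binding
  //   vk::DescriptorSetVariableDescriptorCountAllocateInfo variableCountInfo( variableCounts );
  //   vk::DescriptorSetAllocateInfo allocInfo( pool, 1, &setLayout );
  //   allocInfo.setPNext( &variableCountInfo );
  //   std::vector<vk::DescriptorSet> sets = device.allocateDescriptorSets( allocInfo );
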
struct DescriptorSetVariableDescriptorCountLayoutSupport
|
|
{
|
|
using NativeType = VkDescriptorSetVariableDescriptorCountLayoutSupport;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport(uint32_t maxVariableDescriptorCount_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: maxVariableDescriptorCount( maxVariableDescriptorCount_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorSetVariableDescriptorCountLayoutSupport( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DescriptorSetVariableDescriptorCountLayoutSupport( *reinterpret_cast<DescriptorSetVariableDescriptorCountLayoutSupport const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DescriptorSetVariableDescriptorCountLayoutSupport & operator=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorSetVariableDescriptorCountLayoutSupport & operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkDescriptorSetVariableDescriptorCountLayoutSupport const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountLayoutSupport*>( this );
|
|
}
|
|
|
|
explicit operator VkDescriptorSetVariableDescriptorCountLayoutSupport &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupport*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maxVariableDescriptorCount );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DescriptorSetVariableDescriptorCountLayoutSupport const & ) const = default;
|
|
#else
|
|
bool operator==( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport;
|
|
void * pNext = {};
|
|
uint32_t maxVariableDescriptorCount = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport ) == sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupport ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport>::value, "DescriptorSetVariableDescriptorCountLayoutSupport is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport>
|
|
{
|
|
using Type = DescriptorSetVariableDescriptorCountLayoutSupport;
|
|
};
|
|
using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport;
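
  // Usage sketch (illustrative comment only; assumes a valid vk::Device "device" and that the
  // StructureChain overload of getDescriptorSetLayoutSupport is available in this configuration):
  // querying the largest variable descriptor count a layout supports.
  //
  //   auto chain = device.getDescriptorSetLayoutSupport< vk::DescriptorSetLayoutSupport,
  //                                                      vk::DescriptorSetVariableDescriptorCountLayoutSupport >( layoutInfo );
  //   uint32_t maxCount =
  //     chain.get<vk::DescriptorSetVariableDescriptorCountLayoutSupport>().maxVariableDescriptorCount;
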
struct DescriptorUpdateTemplateEntry
|
|
{
|
|
using NativeType = VkDescriptorUpdateTemplateEntry;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry(uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, size_t offset_ = {}, size_t stride_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ ), descriptorType( descriptorType_ ), offset( offset_ ), stride( stride_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DescriptorUpdateTemplateEntry( *reinterpret_cast<DescriptorUpdateTemplateEntry const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DescriptorUpdateTemplateEntry & operator=( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorUpdateTemplateEntry & operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstBinding = dstBinding_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstArrayElement = dstArrayElement_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorCount = descriptorCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorType = descriptorType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setOffset( size_t offset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
offset = offset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setStride( size_t stride_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stride = stride_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDescriptorUpdateTemplateEntry const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDescriptorUpdateTemplateEntry*>( this );
|
|
}
|
|
|
|
explicit operator VkDescriptorUpdateTemplateEntry &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDescriptorUpdateTemplateEntry*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DescriptorType const &, size_t const &, size_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( dstBinding, dstArrayElement, descriptorCount, descriptorType, offset, stride );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DescriptorUpdateTemplateEntry const & ) const = default;
|
|
#else
|
|
bool operator==( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( dstBinding == rhs.dstBinding )
|
|
&& ( dstArrayElement == rhs.dstArrayElement )
|
|
&& ( descriptorCount == rhs.descriptorCount )
|
|
&& ( descriptorType == rhs.descriptorType )
|
|
&& ( offset == rhs.offset )
|
|
&& ( stride == rhs.stride );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t dstBinding = {};
|
|
uint32_t dstArrayElement = {};
|
|
uint32_t descriptorCount = {};
|
|
VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
|
|
size_t offset = {};
|
|
size_t stride = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry ) == sizeof( VkDescriptorUpdateTemplateEntry ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry>::value, "DescriptorUpdateTemplateEntry is not nothrow_move_constructible!" );
|
|
using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry;
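
  // Usage sketch (illustrative comment only; assumes the default "vk" namespace): one template
  // entry mapping a single uniform-buffer descriptor at binding 0 to a member of an
  // application-defined host structure ("HostDescriptorData" is a hypothetical example type).
  //
  //   struct HostDescriptorData
  //   {
  //     vk::DescriptorBufferInfo uniform;
  //   };
  //   vk::DescriptorUpdateTemplateEntry entry( 0,                                   // dstBinding
  //                                            0,                                   // dstArrayElement
  //                                            1,                                   // descriptorCount
  //                                            vk::DescriptorType::eUniformBuffer,  // descriptorType
  //                                            offsetof( HostDescriptorData, uniform ),
  //                                            sizeof( vk::DescriptorBufferInfo ) );  // stride
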
struct DescriptorUpdateTemplateCreateInfo
|
|
{
|
|
using NativeType = VkDescriptorUpdateTemplateCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorUpdateTemplateCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ = {}, uint32_t descriptorUpdateEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ = {}, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, uint32_t set_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), descriptorUpdateEntryCount( descriptorUpdateEntryCount_ ), pDescriptorUpdateEntries( pDescriptorUpdateEntries_ ), templateType( templateType_ ), descriptorSetLayout( descriptorSetLayout_ ), pipelineBindPoint( pipelineBindPoint_ ), pipelineLayout( pipelineLayout_ ), set( set_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DescriptorUpdateTemplateCreateInfo( *reinterpret_cast<DescriptorUpdateTemplateCreateInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DescriptorUpdateTemplateCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry> const & descriptorUpdateEntries_, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, uint32_t set_ = {} )
|
|
: flags( flags_ ), descriptorUpdateEntryCount( static_cast<uint32_t>( descriptorUpdateEntries_.size() ) ), pDescriptorUpdateEntries( descriptorUpdateEntries_.data() ), templateType( templateType_ ), descriptorSetLayout( descriptorSetLayout_ ), pipelineBindPoint( pipelineBindPoint_ ), pipelineLayout( pipelineLayout_ ), set( set_ )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DescriptorUpdateTemplateCreateInfo & operator=( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DescriptorUpdateTemplateCreateInfo & operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorUpdateEntryCount = descriptorUpdateEntryCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPDescriptorUpdateEntries( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDescriptorUpdateEntries = pDescriptorUpdateEntries_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry> const & descriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorUpdateEntryCount = static_cast<uint32_t>( descriptorUpdateEntries_.size() );
|
|
pDescriptorUpdateEntries = descriptorUpdateEntries_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setTemplateType( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
templateType = templateType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorSetLayout = descriptorSetLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineBindPoint = pipelineBindPoint_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineLayout = pipelineLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
set = set_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDescriptorUpdateTemplateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkDescriptorUpdateTemplateCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDescriptorUpdateTemplateCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * const &, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType const &, VULKAN_HPP_NAMESPACE::DescriptorSetLayout const &, VULKAN_HPP_NAMESPACE::PipelineBindPoint const &, VULKAN_HPP_NAMESPACE::PipelineLayout const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, descriptorUpdateEntryCount, pDescriptorUpdateEntries, templateType, descriptorSetLayout, pipelineBindPoint, pipelineLayout, set );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DescriptorUpdateTemplateCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount )
|
|
&& ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries )
|
|
&& ( templateType == rhs.templateType )
|
|
&& ( descriptorSetLayout == rhs.descriptorSetLayout )
|
|
&& ( pipelineBindPoint == rhs.pipelineBindPoint )
|
|
&& ( pipelineLayout == rhs.pipelineLayout )
|
|
&& ( set == rhs.set );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags = {};
|
|
uint32_t descriptorUpdateEntryCount = {};
|
|
const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries = {};
|
|
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet;
|
|
VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
|
|
VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {};
|
|
uint32_t set = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo>::value, "DescriptorUpdateTemplateCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDescriptorUpdateTemplateCreateInfo>
|
|
{
|
|
using Type = DescriptorUpdateTemplateCreateInfo;
|
|
};
|
|
using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo;
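
  // Usage sketch (illustrative comment only; assumes a valid vk::Device "device", a
  // vk::DescriptorSetLayout "setLayout", a vk::DescriptorSet "descriptorSet", an array of
  // DescriptorUpdateTemplateEntry "entries", and a matching host structure "hostData"):
  //
  //   vk::DescriptorUpdateTemplateCreateInfo templateInfo( {}, entries,
  //                                                        vk::DescriptorUpdateTemplateType::eDescriptorSet,
  //                                                        setLayout );
  //   vk::DescriptorUpdateTemplate updateTemplate = device.createDescriptorUpdateTemplate( templateInfo );
  //   device.updateDescriptorSetWithTemplate( descriptorSet, updateTemplate, &hostData );
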
struct DeviceBufferMemoryRequirements
|
|
{
|
|
using NativeType = VkDeviceBufferMemoryRequirements;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceBufferMemoryRequirements;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceBufferMemoryRequirements(const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: pCreateInfo( pCreateInfo_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceBufferMemoryRequirements( DeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceBufferMemoryRequirements( VkDeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceBufferMemoryRequirements( *reinterpret_cast<DeviceBufferMemoryRequirements const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceBufferMemoryRequirements & operator=( DeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceBufferMemoryRequirements & operator=( VkDeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements & setPCreateInfo( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pCreateInfo = pCreateInfo_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDeviceBufferMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceBufferMemoryRequirements*>( this );
|
|
}
|
|
|
|
explicit operator VkDeviceBufferMemoryRequirements &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceBufferMemoryRequirements*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const VULKAN_HPP_NAMESPACE::BufferCreateInfo * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pCreateInfo );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DeviceBufferMemoryRequirements const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceBufferMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( pCreateInfo == rhs.pCreateInfo );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DeviceBufferMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceBufferMemoryRequirements;
|
|
const void * pNext = {};
|
|
const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements ) == sizeof( VkDeviceBufferMemoryRequirements ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements>::value, "DeviceBufferMemoryRequirements is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceBufferMemoryRequirements>
|
|
{
|
|
using Type = DeviceBufferMemoryRequirements;
|
|
};
|
|
using DeviceBufferMemoryRequirementsKHR = DeviceBufferMemoryRequirements;
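
  // Usage sketch (illustrative comment only; assumes a valid vk::Device "device" with Vulkan 1.3 or
  // VK_KHR_maintenance4 available): memory requirements can be queried from a BufferCreateInfo
  // without actually creating the buffer.
  //
  //   vk::BufferCreateInfo bufferInfo( {}, 65536, vk::BufferUsageFlagBits::eUniformBuffer );
  //   vk::DeviceBufferMemoryRequirements query( &bufferInfo );
  //   vk::MemoryRequirements2 requirements = device.getBufferMemoryRequirements( query );
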
struct DeviceQueueCreateInfo
|
|
{
|
|
using NativeType = VkDeviceQueueCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo(VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}, uint32_t queueCount_ = {}, const float * pQueuePriorities_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ ), queueCount( queueCount_ ), pQueuePriorities( pQueuePriorities_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceQueueCreateInfo( *reinterpret_cast<DeviceQueueCreateInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_, uint32_t queueFamilyIndex_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & queuePriorities_ )
|
|
: flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ ), queueCount( static_cast<uint32_t>( queuePriorities_.size() ) ), pQueuePriorities( queuePriorities_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceQueueCreateInfo & operator=( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceQueueCreateInfo & operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueFamilyIndex = queueFamilyIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setQueueCount( uint32_t queueCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueCount = queueCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setPQueuePriorities( const float * pQueuePriorities_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pQueuePriorities = pQueuePriorities_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceQueueCreateInfo & setQueuePriorities( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & queuePriorities_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueCount = static_cast<uint32_t>( queuePriorities_.size() );
|
|
pQueuePriorities = queuePriorities_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDeviceQueueCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceQueueCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkDeviceQueueCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceQueueCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags const &, uint32_t const &, uint32_t const &, const float * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, queueFamilyIndex, queueCount, pQueuePriorities );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DeviceQueueCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( queueFamilyIndex == rhs.queueFamilyIndex )
|
|
&& ( queueCount == rhs.queueCount )
|
|
&& ( pQueuePriorities == rhs.pQueuePriorities );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {};
|
|
uint32_t queueFamilyIndex = {};
|
|
uint32_t queueCount = {};
|
|
const float * pQueuePriorities = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo>::value, "DeviceQueueCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceQueueCreateInfo>
|
|
{
|
|
using Type = DeviceQueueCreateInfo;
|
|
};
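
  // Usage sketch (illustrative comment only; assumes a valid vk::PhysicalDevice "physicalDevice"
  // and a previously selected queue family index "graphicsQueueFamilyIndex"): one graphics queue
  // with the highest priority, used when creating the logical device.
  //
  //   float queuePriority = 1.0f;
  //   vk::DeviceQueueCreateInfo queueInfo( {}, graphicsQueueFamilyIndex, 1, &queuePriority );
  //   vk::DeviceCreateInfo deviceInfo( {}, queueInfo );
  //   vk::Device device = physicalDevice.createDevice( deviceInfo );
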
struct PhysicalDeviceFeatures
|
|
{
|
|
using NativeType = VkPhysicalDeviceFeatures;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures(VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {}, VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {}, VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {}, VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {}, VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: robustBufferAccess( robustBufferAccess_ ), fullDrawIndexUint32( fullDrawIndexUint32_ ), imageCubeArray( imageCubeArray_ ), independentBlend( independentBlend_ ), geometryShader( geometryShader_ ), tessellationShader( tessellationShader_ ), sampleRateShading( sampleRateShading_ ), dualSrcBlend( dualSrcBlend_ ), logicOp( logicOp_ ), multiDrawIndirect( multiDrawIndirect_ ), drawIndirectFirstInstance( drawIndirectFirstInstance_ ), depthClamp( depthClamp_ ), depthBiasClamp( depthBiasClamp_ ), fillModeNonSolid( fillModeNonSolid_ ), depthBounds( depthBounds_ ), wideLines( wideLines_ ), largePoints( largePoints_ ), alphaToOne( alphaToOne_ ), multiViewport( multiViewport_ ), samplerAnisotropy( samplerAnisotropy_ ), textureCompressionETC2( textureCompressionETC2_ ), textureCompressionASTC_LDR( textureCompressionASTC_LDR_ ), textureCompressionBC( textureCompressionBC_ ), occlusionQueryPrecise( occlusionQueryPrecise_ ), pipelineStatisticsQuery( pipelineStatisticsQuery_ ), vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ ), fragmentStoresAndAtomics( fragmentStoresAndAtomics_ ), shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ ), shaderImageGatherExtended( shaderImageGatherExtended_ ), shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ ), shaderStorageImageMultisample( shaderStorageImageMultisample_ ), shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ ), shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ ), shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ ), shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ ), shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ ), shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ ), shaderClipDistance( shaderClipDistance_ ), shaderCullDistance( shaderCullDistance_ ), shaderFloat64( shaderFloat64_ ), shaderInt64( shaderInt64_ ), shaderInt16( shaderInt16_ ), shaderResourceResidency( shaderResourceResidency_ ), shaderResourceMinLod( shaderResourceMinLod_ ), sparseBinding( sparseBinding_ ), sparseResidencyBuffer( sparseResidencyBuffer_ ), sparseResidencyImage2D( sparseResidencyImage2D_ ), sparseResidencyImage3D( sparseResidencyImage3D_ ), sparseResidency2Samples( sparseResidency2Samples_ ), sparseResidency4Samples( sparseResidency4Samples_ ), sparseResidency8Samples( sparseResidency8Samples_ ), sparseResidency16Samples( sparseResidency16Samples_ ), sparseResidencyAliased( sparseResidencyAliased_ ), variableMultisampleRate( variableMultisampleRate_ ), inheritedQueries( inheritedQueries_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceFeatures( *reinterpret_cast<PhysicalDeviceFeatures const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceFeatures & operator=( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceFeatures & operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
robustBufferAccess = robustBufferAccess_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFullDrawIndexUint32( VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fullDrawIndexUint32 = fullDrawIndexUint32_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setImageCubeArray( VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageCubeArray = imageCubeArray_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setIndependentBlend( VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
independentBlend = independentBlend_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
geometryShader = geometryShader_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
tessellationShader = tessellationShader_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSampleRateShading( VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sampleRateShading = sampleRateShading_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDualSrcBlend( VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dualSrcBlend = dualSrcBlend_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setLogicOp( VULKAN_HPP_NAMESPACE::Bool32 logicOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
logicOp = logicOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setMultiDrawIndirect( VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
multiDrawIndirect = multiDrawIndirect_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDrawIndirectFirstInstance( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
drawIndirectFirstInstance = drawIndirectFirstInstance_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthClamp( VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
depthClamp = depthClamp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthBiasClamp( VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
depthBiasClamp = depthBiasClamp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFillModeNonSolid( VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fillModeNonSolid = fillModeNonSolid_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthBounds( VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
depthBounds = depthBounds_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setWideLines( VULKAN_HPP_NAMESPACE::Bool32 wideLines_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
wideLines = wideLines_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setLargePoints( VULKAN_HPP_NAMESPACE::Bool32 largePoints_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
largePoints = largePoints_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setAlphaToOne( VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
alphaToOne = alphaToOne_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setMultiViewport( VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
multiViewport = multiViewport_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSamplerAnisotropy( VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
samplerAnisotropy = samplerAnisotropy_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTextureCompressionETC2( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
textureCompressionETC2 = textureCompressionETC2_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTextureCompressionASTC_LDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
textureCompressionASTC_LDR = textureCompressionASTC_LDR_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTextureCompressionBC( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
textureCompressionBC = textureCompressionBC_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setOcclusionQueryPrecise( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
occlusionQueryPrecise = occlusionQueryPrecise_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setPipelineStatisticsQuery( VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineStatisticsQuery = pipelineStatisticsQuery_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setVertexPipelineStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFragmentStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fragmentStoresAndAtomics = fragmentStoresAndAtomics_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderTessellationAndGeometryPointSize( VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderImageGatherExtended( VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderImageGatherExtended = shaderImageGatherExtended_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageExtendedFormats( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageMultisample( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderStorageImageMultisample = shaderStorageImageMultisample_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageReadWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageWriteWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderUniformBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderSampledImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderClipDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderClipDistance = shaderClipDistance_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderCullDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderCullDistance = shaderCullDistance_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderFloat64 = shaderFloat64_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderInt64( VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderInt64 = shaderInt64_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderInt16( VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderInt16 = shaderInt16_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderResourceResidency( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderResourceResidency = shaderResourceResidency_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderResourceMinLod( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderResourceMinLod = shaderResourceMinLod_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseBinding( VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sparseBinding = sparseBinding_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyBuffer( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sparseResidencyBuffer = sparseResidencyBuffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyImage2D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sparseResidencyImage2D = sparseResidencyImage2D_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyImage3D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sparseResidencyImage3D = sparseResidencyImage3D_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency2Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sparseResidency2Samples = sparseResidency2Samples_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency4Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sparseResidency4Samples = sparseResidency4Samples_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency8Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sparseResidency8Samples = sparseResidency8Samples_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency16Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sparseResidency16Samples = sparseResidency16Samples_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyAliased( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sparseResidencyAliased = sparseResidencyAliased_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setVariableMultisampleRate( VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
variableMultisampleRate = variableMultisampleRate_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setInheritedQueries( VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
inheritedQueries = inheritedQueries_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceFeatures*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceFeatures*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( robustBufferAccess, fullDrawIndexUint32, imageCubeArray, independentBlend, geometryShader, tessellationShader, sampleRateShading, dualSrcBlend, logicOp, multiDrawIndirect, drawIndirectFirstInstance, depthClamp, depthBiasClamp, fillModeNonSolid, depthBounds, wideLines, largePoints, alphaToOne, multiViewport, samplerAnisotropy, textureCompressionETC2, textureCompressionASTC_LDR, textureCompressionBC, occlusionQueryPrecise, pipelineStatisticsQuery, vertexPipelineStoresAndAtomics, fragmentStoresAndAtomics, shaderTessellationAndGeometryPointSize, shaderImageGatherExtended, shaderStorageImageExtendedFormats, shaderStorageImageMultisample, shaderStorageImageReadWithoutFormat, shaderStorageImageWriteWithoutFormat, shaderUniformBufferArrayDynamicIndexing, shaderSampledImageArrayDynamicIndexing, shaderStorageBufferArrayDynamicIndexing, shaderStorageImageArrayDynamicIndexing, shaderClipDistance, shaderCullDistance, shaderFloat64, shaderInt64, shaderInt16, shaderResourceResidency, shaderResourceMinLod, sparseBinding, sparseResidencyBuffer, sparseResidencyImage2D, sparseResidencyImage3D, sparseResidency2Samples, sparseResidency4Samples, sparseResidency8Samples, sparseResidency16Samples, sparseResidencyAliased, variableMultisampleRate, inheritedQueries );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( robustBufferAccess == rhs.robustBufferAccess )
|
|
&& ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 )
|
|
&& ( imageCubeArray == rhs.imageCubeArray )
|
|
&& ( independentBlend == rhs.independentBlend )
|
|
&& ( geometryShader == rhs.geometryShader )
|
|
&& ( tessellationShader == rhs.tessellationShader )
|
|
&& ( sampleRateShading == rhs.sampleRateShading )
|
|
&& ( dualSrcBlend == rhs.dualSrcBlend )
|
|
&& ( logicOp == rhs.logicOp )
|
|
&& ( multiDrawIndirect == rhs.multiDrawIndirect )
|
|
&& ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance )
|
|
&& ( depthClamp == rhs.depthClamp )
|
|
&& ( depthBiasClamp == rhs.depthBiasClamp )
|
|
&& ( fillModeNonSolid == rhs.fillModeNonSolid )
|
|
&& ( depthBounds == rhs.depthBounds )
|
|
&& ( wideLines == rhs.wideLines )
|
|
&& ( largePoints == rhs.largePoints )
|
|
&& ( alphaToOne == rhs.alphaToOne )
|
|
&& ( multiViewport == rhs.multiViewport )
|
|
&& ( samplerAnisotropy == rhs.samplerAnisotropy )
|
|
&& ( textureCompressionETC2 == rhs.textureCompressionETC2 )
|
|
&& ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR )
|
|
&& ( textureCompressionBC == rhs.textureCompressionBC )
|
|
&& ( occlusionQueryPrecise == rhs.occlusionQueryPrecise )
|
|
&& ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery )
|
|
&& ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics )
|
|
&& ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics )
|
|
&& ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize )
|
|
&& ( shaderImageGatherExtended == rhs.shaderImageGatherExtended )
|
|
&& ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats )
|
|
&& ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample )
|
|
&& ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat )
|
|
&& ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat )
|
|
&& ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing )
|
|
&& ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing )
|
|
&& ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing )
|
|
&& ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing )
|
|
&& ( shaderClipDistance == rhs.shaderClipDistance )
|
|
&& ( shaderCullDistance == rhs.shaderCullDistance )
|
|
&& ( shaderFloat64 == rhs.shaderFloat64 )
|
|
&& ( shaderInt64 == rhs.shaderInt64 )
|
|
&& ( shaderInt16 == rhs.shaderInt16 )
|
|
&& ( shaderResourceResidency == rhs.shaderResourceResidency )
|
|
&& ( shaderResourceMinLod == rhs.shaderResourceMinLod )
|
|
&& ( sparseBinding == rhs.sparseBinding )
|
|
&& ( sparseResidencyBuffer == rhs.sparseResidencyBuffer )
|
|
&& ( sparseResidencyImage2D == rhs.sparseResidencyImage2D )
|
|
&& ( sparseResidencyImage3D == rhs.sparseResidencyImage3D )
|
|
&& ( sparseResidency2Samples == rhs.sparseResidency2Samples )
|
|
&& ( sparseResidency4Samples == rhs.sparseResidency4Samples )
|
|
&& ( sparseResidency8Samples == rhs.sparseResidency8Samples )
|
|
&& ( sparseResidency16Samples == rhs.sparseResidency16Samples )
|
|
&& ( sparseResidencyAliased == rhs.sparseResidencyAliased )
|
|
&& ( variableMultisampleRate == rhs.variableMultisampleRate )
|
|
&& ( inheritedQueries == rhs.inheritedQueries );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 independentBlend = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 geometryShader = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 tessellationShader = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 logicOp = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 depthClamp = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 depthBounds = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 wideLines = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 largePoints = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 alphaToOne = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 multiViewport = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderInt64 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderInt16 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseBinding = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries = {};
};
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures>::value, "PhysicalDeviceFeatures is not nothrow_move_constructible!" );
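
// Usage sketch (comment only, not part of the generated interface): query which core features a
// physical device supports and opt into the subset the application needs. Assumes
// <vulkan/vulkan.hpp> is included and a valid vk::PhysicalDevice named `physicalDevice` exists.
//
//   vk::PhysicalDeviceFeatures supported = physicalDevice.getFeatures();
//   vk::PhysicalDeviceFeatures enabled;                                  // every member defaults to VK_FALSE
//   enabled.setSamplerAnisotropy( supported.samplerAnisotropy )
//          .setFillModeNonSolid( supported.fillModeNonSolid );
//   // pass `enabled` through DeviceCreateInfo::pEnabledFeatures, as sketched after DeviceCreateInfo below
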
struct DeviceCreateInfo
|
|
{
|
|
using NativeType = VkDeviceCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceCreateInfo(VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {}, uint32_t queueCreateInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ = {}, uint32_t enabledLayerCount_ = {}, const char * const * ppEnabledLayerNames_ = {}, uint32_t enabledExtensionCount_ = {}, const char * const * ppEnabledExtensionNames_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), queueCreateInfoCount( queueCreateInfoCount_ ), pQueueCreateInfos( pQueueCreateInfos_ ), enabledLayerCount( enabledLayerCount_ ), ppEnabledLayerNames( ppEnabledLayerNames_ ), enabledExtensionCount( enabledExtensionCount_ ), ppEnabledExtensionNames( ppEnabledExtensionNames_ ), pEnabledFeatures( pEnabledFeatures_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceCreateInfo( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceCreateInfo( *reinterpret_cast<DeviceCreateInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo> const & queueCreateInfos_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {} )
|
|
: flags( flags_ ), queueCreateInfoCount( static_cast<uint32_t>( queueCreateInfos_.size() ) ), pQueueCreateInfos( queueCreateInfos_.data() ), enabledLayerCount( static_cast<uint32_t>( pEnabledLayerNames_.size() ) ), ppEnabledLayerNames( pEnabledLayerNames_.data() ), enabledExtensionCount( static_cast<uint32_t>( pEnabledExtensionNames_.size() ) ), ppEnabledExtensionNames( pEnabledExtensionNames_.data() ), pEnabledFeatures( pEnabledFeatures_ )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceCreateInfo & operator=( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceCreateInfo & operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueCreateInfoCount = queueCreateInfoCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPQueueCreateInfos( const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pQueueCreateInfos = pQueueCreateInfos_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceCreateInfo & setQueueCreateInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo> const & queueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueCreateInfoCount = static_cast<uint32_t>( queueCreateInfos_.size() );
|
|
pQueueCreateInfos = queueCreateInfos_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
enabledLayerCount = enabledLayerCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
ppEnabledLayerNames = ppEnabledLayerNames_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceCreateInfo & setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
enabledLayerCount = static_cast<uint32_t>( pEnabledLayerNames_.size() );
|
|
ppEnabledLayerNames = pEnabledLayerNames_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
enabledExtensionCount = enabledExtensionCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
ppEnabledExtensionNames = ppEnabledExtensionNames_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceCreateInfo & setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
enabledExtensionCount = static_cast<uint32_t>( pEnabledExtensionNames_.size() );
|
|
ppEnabledExtensionNames = pEnabledExtensionNames_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPEnabledFeatures( const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pEnabledFeatures = pEnabledFeatures_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * const &, uint32_t const &, const char * const * const &, uint32_t const &, const char * const * const &, const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, queueCreateInfoCount, pQueueCreateInfos, enabledLayerCount, ppEnabledLayerNames, enabledExtensionCount, ppEnabledExtensionNames, pEnabledFeatures );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
std::strong_ordering operator<=>( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
|
|
if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
|
|
if ( auto cmp = queueCreateInfoCount <=> rhs.queueCreateInfoCount; cmp != 0 ) return cmp;
|
|
if ( auto cmp = pQueueCreateInfos <=> rhs.pQueueCreateInfos; cmp != 0 ) return cmp;
|
|
if ( auto cmp = enabledLayerCount <=> rhs.enabledLayerCount; cmp != 0 ) return cmp;
|
|
for ( size_t i = 0; i < enabledLayerCount; ++i )
|
|
{
|
|
if ( ppEnabledLayerNames[i] != rhs.ppEnabledLayerNames[i] )
|
|
if ( auto cmp = strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ); cmp != 0 )
|
|
return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
}
|
|
if ( auto cmp = enabledExtensionCount <=> rhs.enabledExtensionCount; cmp != 0 ) return cmp;
|
|
for ( size_t i = 0; i < enabledExtensionCount; ++i )
|
|
{
|
|
if ( ppEnabledExtensionNames[i] != rhs.ppEnabledExtensionNames[i] )
|
|
if ( auto cmp = strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ); cmp != 0 )
|
|
return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
}
|
|
if ( auto cmp = pEnabledFeatures <=> rhs.pEnabledFeatures; cmp != 0 ) return cmp;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( queueCreateInfoCount == rhs.queueCreateInfoCount )
|
|
&& ( pQueueCreateInfos == rhs.pQueueCreateInfos )
|
|
&& ( enabledLayerCount == rhs.enabledLayerCount )
|
|
&& [this, rhs]
|
|
{
|
|
bool equal = true;
|
|
for ( size_t i = 0; equal && ( i < enabledLayerCount ); ++i )
|
|
{
|
|
equal = ( ( ppEnabledLayerNames[i] == rhs.ppEnabledLayerNames[i] ) || ( strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ) == 0 ) );
|
|
}
|
|
return equal;
|
|
}()
|
|
&& ( enabledExtensionCount == rhs.enabledExtensionCount )
|
|
&& [this, rhs]
|
|
{
|
|
bool equal = true;
|
|
for ( size_t i = 0; equal && ( i < enabledExtensionCount ); ++i )
|
|
{
|
|
equal = ( ( ppEnabledExtensionNames[i] == rhs.ppEnabledExtensionNames[i] ) || ( strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ) == 0 ) );
|
|
}
|
|
return equal;
|
|
}()
|
|
&& ( pEnabledFeatures == rhs.pEnabledFeatures );
|
|
}
|
|
|
|
bool operator!=( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags = {};
|
|
uint32_t queueCreateInfoCount = {};
|
|
const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos = {};
|
|
uint32_t enabledLayerCount = {};
|
|
const char * const * ppEnabledLayerNames = {};
|
|
uint32_t enabledExtensionCount = {};
|
|
const char * const * ppEnabledExtensionNames = {};
|
|
const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures = {};
};
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceCreateInfo>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceCreateInfo>::value, "DeviceCreateInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDeviceCreateInfo>
{
using Type = DeviceCreateInfo;
};
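
// Usage sketch (comment only, not part of the generated interface): the ArrayProxy setters fill the
// count/pointer pairs in one call. `physicalDevice`, `queueFamilyIndex`, the swapchain extension and
// the `enabled` feature struct are assumptions of this sketch.
//
//   float priority = 1.0f;
//   vk::DeviceQueueCreateInfo queueInfo( {}, queueFamilyIndex, 1, &priority );
//   std::array<const char *, 1> extensions = { VK_KHR_SWAPCHAIN_EXTENSION_NAME };
//
//   vk::DeviceCreateInfo createInfo;
//   createInfo.setQueueCreateInfos( queueInfo )              // queueCreateInfoCount + pQueueCreateInfos
//             .setPEnabledExtensionNames( extensions )       // enabledExtensionCount + ppEnabledExtensionNames
//             .setPEnabledFeatures( &enabled );
//   vk::Device device = physicalDevice.createDevice( createInfo );
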
struct DeviceDeviceMemoryReportCreateInfoEXT
|
|
{
|
|
using NativeType = VkDeviceDeviceMemoryReportCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = true;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT(VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ = {}, void * pUserData_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), pfnUserCallback( pfnUserCallback_ ), pUserData( pUserData_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceDeviceMemoryReportCreateInfoEXT( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceDeviceMemoryReportCreateInfoEXT( *reinterpret_cast<DeviceDeviceMemoryReportCreateInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceDeviceMemoryReportCreateInfoEXT & operator=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceDeviceMemoryReportCreateInfoEXT & operator=( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setPfnUserCallback( PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pfnUserCallback = pfnUserCallback_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pUserData = pUserData_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDeviceDeviceMemoryReportCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceDeviceMemoryReportCreateInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkDeviceDeviceMemoryReportCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceDeviceMemoryReportCreateInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT const &, PFN_vkDeviceMemoryReportCallbackEXT const &, void * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, pfnUserCallback, pUserData );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DeviceDeviceMemoryReportCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( pfnUserCallback == rhs.pfnUserCallback )
|
|
&& ( pUserData == rhs.pUserData );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {};
|
|
PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback = {};
|
|
void * pUserData = {};
};
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT ) == sizeof( VkDeviceDeviceMemoryReportCreateInfoEXT ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT>::value, "DeviceDeviceMemoryReportCreateInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDeviceDeviceMemoryReportCreateInfoEXT>
{
using Type = DeviceDeviceMemoryReportCreateInfoEXT;
};
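
// Usage sketch (comment only, not part of the generated interface): chain a memory-report callback
// into device creation; VK_EXT_device_memory_report must be enabled on the device. The callback body
// and the `createInfo` variable are assumptions of this sketch. Because allowDuplicate is true,
// several of these structures may appear in the same pNext chain.
//
//   VKAPI_ATTR void VKAPI_CALL memoryReportCallback( const VkDeviceMemoryReportCallbackDataEXT * pData, void * /*pUserData*/ )
//   {
//     // e.g. log pData->type, pData->objectType and pData->size
//   }
//
//   vk::DeviceDeviceMemoryReportCreateInfoEXT memoryReport( {}, &memoryReportCallback, nullptr );
//   memoryReport.setPNext( createInfo.pNext );   // keep any structures already chained
//   createInfo.setPNext( &memoryReport );
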
struct DeviceDiagnosticsConfigCreateInfoNV
|
|
{
|
|
using NativeType = VkDeviceDiagnosticsConfigCreateInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV(VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceDiagnosticsConfigCreateInfoNV( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceDiagnosticsConfigCreateInfoNV( *reinterpret_cast<DeviceDiagnosticsConfigCreateInfoNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceDiagnosticsConfigCreateInfoNV & operator=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceDiagnosticsConfigCreateInfoNV & operator=( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDeviceDiagnosticsConfigCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceDiagnosticsConfigCreateInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkDeviceDiagnosticsConfigCreateInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceDiagnosticsConfigCreateInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DeviceDiagnosticsConfigCreateInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags = {};
};
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV ) == sizeof( VkDeviceDiagnosticsConfigCreateInfoNV ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV>::value, "DeviceDiagnosticsConfigCreateInfoNV is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDeviceDiagnosticsConfigCreateInfoNV>
{
using Type = DeviceDiagnosticsConfigCreateInfoNV;
};
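
// Usage sketch (comment only, not part of the generated interface): request NV device diagnostics at
// device creation via the pNext chain. The flag-bit names come from VK_NV_device_diagnostics_config and
// the base `createInfo` is an assumption of this sketch.
//
//   vk::StructureChain<vk::DeviceCreateInfo, vk::DeviceDiagnosticsConfigCreateInfoNV> chain(
//     createInfo,
//     vk::DeviceDiagnosticsConfigCreateInfoNV( vk::DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo
//                                              | vk::DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking ) );
//   vk::Device device = physicalDevice.createDevice( chain.get<vk::DeviceCreateInfo>() );
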
struct DeviceEventInfoEXT
|
|
{
|
|
using NativeType = VkDeviceEventInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceEventInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT(VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug) VULKAN_HPP_NOEXCEPT
|
|
: deviceEvent( deviceEvent_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceEventInfoEXT( *reinterpret_cast<DeviceEventInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceEventInfoEXT & operator=( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceEventInfoEXT & operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT & setDeviceEvent( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceEvent = deviceEvent_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDeviceEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceEventInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkDeviceEventInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceEventInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, deviceEvent );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DeviceEventInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( deviceEvent == rhs.deviceEvent );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceEventInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug;
};
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT>::value, "DeviceEventInfoEXT is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDeviceEventInfoEXT>
{
using Type = DeviceEventInfoEXT;
};
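
// Usage sketch (comment only, not part of the generated interface): with VK_EXT_display_control enabled,
// a fence can be obtained that signals on a device-level event such as a display hotplug. The
// enhanced-mode registerEventEXT signature and the `device` handle are assumptions of this sketch.
//
//   vk::DeviceEventInfoEXT eventInfo( vk::DeviceEventTypeEXT::eDisplayHotplug );
//   vk::Fence hotplugFence = device.registerEventEXT( eventInfo );
//   // wait on or poll hotplugFence like any other fence, and destroy it when no longer needed
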
struct DeviceGroupBindSparseInfo
|
|
{
|
|
using NativeType = VkDeviceGroupBindSparseInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupBindSparseInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo(uint32_t resourceDeviceIndex_ = {}, uint32_t memoryDeviceIndex_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: resourceDeviceIndex( resourceDeviceIndex_ ), memoryDeviceIndex( memoryDeviceIndex_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceGroupBindSparseInfo( *reinterpret_cast<DeviceGroupBindSparseInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceGroupBindSparseInfo & operator=( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceGroupBindSparseInfo & operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setResourceDeviceIndex( uint32_t resourceDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
resourceDeviceIndex = resourceDeviceIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memoryDeviceIndex = memoryDeviceIndex_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDeviceGroupBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceGroupBindSparseInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkDeviceGroupBindSparseInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceGroupBindSparseInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, resourceDeviceIndex, memoryDeviceIndex );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DeviceGroupBindSparseInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( resourceDeviceIndex == rhs.resourceDeviceIndex )
|
|
&& ( memoryDeviceIndex == rhs.memoryDeviceIndex );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupBindSparseInfo;
|
|
const void * pNext = {};
|
|
uint32_t resourceDeviceIndex = {};
|
|
uint32_t memoryDeviceIndex = {};
};
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo>::value, "DeviceGroupBindSparseInfo is not nothrow_move_constructible!" );
template <>
struct CppType<StructureType, StructureType::eDeviceGroupBindSparseInfo>
{
using Type = DeviceGroupBindSparseInfo;
};
using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo;
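
// Usage sketch (comment only, not part of the generated interface): in a device group, chain this
// structure into vk::BindSparseInfo to select which physical device owns the resource and which one
// supplies the memory. `bindSparseInfo` and the zero indices are assumptions of this sketch.
//
//   vk::DeviceGroupBindSparseInfo groupInfo;
//   groupInfo.setResourceDeviceIndex( 0 )
//            .setMemoryDeviceIndex( 0 );
//   bindSparseInfo.setPNext( &groupInfo );   // then submit through vk::Queue::bindSparse
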
  struct DeviceGroupCommandBufferBeginInfo
  {
    using NativeType = VkDeviceGroupCommandBufferBeginInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupCommandBufferBeginInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT
      : deviceMask( deviceMask_ )
    {}

    VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceGroupCommandBufferBeginInfo( *reinterpret_cast<DeviceGroupCommandBufferBeginInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    DeviceGroupCommandBufferBeginInfo & operator=( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceGroupCommandBufferBeginInfo & operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceMask = deviceMask_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkDeviceGroupCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceGroupCommandBufferBeginInfo*>( this );
    }

    explicit operator VkDeviceGroupCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceGroupCommandBufferBeginInfo*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, deviceMask );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( DeviceGroupCommandBufferBeginInfo const & ) const = default;
#else
    bool operator==( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( deviceMask == rhs.deviceMask );
#endif
    }

    bool operator!=( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo;
    const void * pNext = {};
    uint32_t deviceMask = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo>::value, "DeviceGroupCommandBufferBeginInfo is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eDeviceGroupCommandBufferBeginInfo>
  {
    using Type = DeviceGroupCommandBufferBeginInfo;
  };
  using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo;
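  // Illustrative usage sketch (not generated from the registry; 'commandBuffer' is assumed to be an
  // existing VULKAN_HPP_NAMESPACE::CommandBuffer): DeviceGroupCommandBufferBeginInfo is chained into
  // CommandBufferBeginInfo::pNext so the recorded commands only execute on the devices in deviceMask.
  //
  //   VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo deviceGroupBegin( 0x3 );  // devices 0 and 1
  //   VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo beginInfo;
  //   beginInfo.pNext = &deviceGroupBegin;
  //   commandBuffer.begin( beginInfo );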
struct DeviceGroupDeviceCreateInfo
|
|
{
|
|
using NativeType = VkDeviceGroupDeviceCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupDeviceCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo(uint32_t physicalDeviceCount_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: physicalDeviceCount( physicalDeviceCount_ ), pPhysicalDevices( pPhysicalDevices_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceGroupDeviceCreateInfo( *reinterpret_cast<DeviceGroupDeviceCreateInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceGroupDeviceCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PhysicalDevice> const & physicalDevices_ )
|
|
: physicalDeviceCount( static_cast<uint32_t>( physicalDevices_.size() ) ), pPhysicalDevices( physicalDevices_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceGroupDeviceCreateInfo & operator=( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceGroupDeviceCreateInfo & operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
physicalDeviceCount = physicalDeviceCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPPhysicalDevices( const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pPhysicalDevices = pPhysicalDevices_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceGroupDeviceCreateInfo & setPhysicalDevices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PhysicalDevice> const & physicalDevices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
physicalDeviceCount = static_cast<uint32_t>( physicalDevices_.size() );
|
|
pPhysicalDevices = physicalDevices_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDeviceGroupDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceGroupDeviceCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceGroupDeviceCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PhysicalDevice * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, physicalDeviceCount, pPhysicalDevices );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DeviceGroupDeviceCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( physicalDeviceCount == rhs.physicalDeviceCount )
|
|
&& ( pPhysicalDevices == rhs.pPhysicalDevices );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo;
|
|
const void * pNext = {};
|
|
uint32_t physicalDeviceCount = {};
|
|
const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo>::value, "DeviceGroupDeviceCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceGroupDeviceCreateInfo>
|
|
{
|
|
using Type = DeviceGroupDeviceCreateInfo;
|
|
};
|
|
using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo;
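  // Illustrative usage sketch (assumptions: an existing 'instance', a filled-in DeviceCreateInfo
  // 'deviceCreateInfo', and the default exception-based configuration): a logical device spanning a
  // physical-device group is created by chaining DeviceGroupDeviceCreateInfo into DeviceCreateInfo::pNext,
  // pointing at the devices reported by Instance::enumeratePhysicalDeviceGroups.
  //
  //   auto groups = instance.enumeratePhysicalDeviceGroups();
  //   VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo deviceGroupCreateInfo(
  //     groups[0].physicalDeviceCount, groups[0].physicalDevices.data() );
  //   deviceCreateInfo.pNext = &deviceGroupCreateInfo;
  //   VULKAN_HPP_NAMESPACE::Device device = groups[0].physicalDevices[0].createDevice( deviceCreateInfo );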
|
|
|
|
struct DeviceGroupPresentCapabilitiesKHR
|
|
{
|
|
using NativeType = VkDeviceGroupPresentCapabilitiesKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR(std::array<uint32_t,VK_MAX_DEVICE_GROUP_SIZE> const & presentMask_ = {}, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: presentMask( presentMask_ ), modes( modes_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceGroupPresentCapabilitiesKHR( *reinterpret_cast<DeviceGroupPresentCapabilitiesKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceGroupPresentCapabilitiesKHR & operator=( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceGroupPresentCapabilitiesKHR & operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkDeviceGroupPresentCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceGroupPresentCapabilitiesKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, VK_MAX_DEVICE_GROUP_SIZE> const &, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, presentMask, modes );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DeviceGroupPresentCapabilitiesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( presentMask == rhs.presentMask )
|
|
&& ( modes == rhs.modes );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, VK_MAX_DEVICE_GROUP_SIZE> presentMask = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::value, "DeviceGroupPresentCapabilitiesKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceGroupPresentCapabilitiesKHR>
|
|
{
|
|
using Type = DeviceGroupPresentCapabilitiesKHR;
|
|
};
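  // Illustrative usage sketch (not generated from the registry): DeviceGroupPresentCapabilitiesKHR is an
  // output structure for vkGetDeviceGroupPresentCapabilitiesKHR. Bit j of presentMask[i] is set if physical
  // device i can present swapchain images allocated on physical device j, and 'modes' lists the supported
  // device-group present modes.
  //
  //   VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR presentCapabilities =
  //     device.getGroupPresentCapabilitiesKHR();   // method name assumed from the usual vulkan.hpp mapping
  //   bool deviceOneCanPresentImagesFromDeviceZero = ( presentCapabilities.presentMask[1] & 0x1 ) != 0;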
|
|
|
|
struct DeviceGroupPresentInfoKHR
|
|
{
|
|
using NativeType = VkDeviceGroupPresentInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR(uint32_t swapchainCount_ = {}, const uint32_t * pDeviceMasks_ = {}, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal) VULKAN_HPP_NOEXCEPT
|
|
: swapchainCount( swapchainCount_ ), pDeviceMasks( pDeviceMasks_ ), mode( mode_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceGroupPresentInfoKHR( *reinterpret_cast<DeviceGroupPresentInfoKHR const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceGroupPresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceMasks_, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal )
|
|
: swapchainCount( static_cast<uint32_t>( deviceMasks_.size() ) ), pDeviceMasks( deviceMasks_.data() ), mode( mode_ )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceGroupPresentInfoKHR & operator=( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceGroupPresentInfoKHR & operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
swapchainCount = swapchainCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setPDeviceMasks( const uint32_t * pDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDeviceMasks = pDeviceMasks_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceGroupPresentInfoKHR & setDeviceMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceMasks_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
swapchainCount = static_cast<uint32_t>( deviceMasks_.size() );
|
|
pDeviceMasks = deviceMasks_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setMode( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
mode = mode_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDeviceGroupPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceGroupPresentInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceGroupPresentInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, swapchainCount, pDeviceMasks, mode );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DeviceGroupPresentInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( swapchainCount == rhs.swapchainCount )
|
|
&& ( pDeviceMasks == rhs.pDeviceMasks )
|
|
&& ( mode == rhs.mode );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentInfoKHR;
|
|
const void * pNext = {};
|
|
uint32_t swapchainCount = {};
|
|
const uint32_t * pDeviceMasks = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR>::value, "DeviceGroupPresentInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceGroupPresentInfoKHR>
|
|
{
|
|
using Type = DeviceGroupPresentInfoKHR;
|
|
};
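  // Illustrative usage sketch (assumptions: a filled-in PresentInfoKHR 'presentInfo' and enhanced mode
  // enabled): when presenting from a device group, DeviceGroupPresentInfoKHR is chained into
  // PresentInfoKHR::pNext with one device mask per swapchain entry of the present operation.
  //
  //   uint32_t deviceMasks[] = { 0x1 };   // present swapchain 0 from physical device 0
  //   VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR deviceGroupPresentInfo(
  //     deviceMasks, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal );
  //   presentInfo.pNext = &deviceGroupPresentInfo;
  //   // queue.presentKHR( presentInfo );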
|
|
|
|
struct DeviceGroupRenderPassBeginInfo
|
|
{
|
|
using NativeType = VkDeviceGroupRenderPassBeginInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupRenderPassBeginInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo(uint32_t deviceMask_ = {}, uint32_t deviceRenderAreaCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: deviceMask( deviceMask_ ), deviceRenderAreaCount( deviceRenderAreaCount_ ), pDeviceRenderAreas( pDeviceRenderAreas_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceGroupRenderPassBeginInfo( *reinterpret_cast<DeviceGroupRenderPassBeginInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & deviceRenderAreas_ )
|
|
: deviceMask( deviceMask_ ), deviceRenderAreaCount( static_cast<uint32_t>( deviceRenderAreas_.size() ) ), pDeviceRenderAreas( deviceRenderAreas_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceGroupRenderPassBeginInfo & operator=( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceGroupRenderPassBeginInfo & operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceMask = deviceMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceRenderAreaCount = deviceRenderAreaCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setPDeviceRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDeviceRenderAreas = pDeviceRenderAreas_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceGroupRenderPassBeginInfo & setDeviceRenderAreas( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & deviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceRenderAreaCount = static_cast<uint32_t>( deviceRenderAreas_.size() );
|
|
pDeviceRenderAreas = deviceRenderAreas_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDeviceGroupRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceGroupRenderPassBeginInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceGroupRenderPassBeginInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Rect2D * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, deviceMask, deviceRenderAreaCount, pDeviceRenderAreas );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DeviceGroupRenderPassBeginInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( deviceMask == rhs.deviceMask )
|
|
&& ( deviceRenderAreaCount == rhs.deviceRenderAreaCount )
|
|
&& ( pDeviceRenderAreas == rhs.pDeviceRenderAreas );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo;
|
|
const void * pNext = {};
|
|
uint32_t deviceMask = {};
|
|
uint32_t deviceRenderAreaCount = {};
|
|
const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo>::value, "DeviceGroupRenderPassBeginInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceGroupRenderPassBeginInfo>
|
|
{
|
|
using Type = DeviceGroupRenderPassBeginInfo;
|
|
};
|
|
using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo;
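  // Illustrative usage sketch (assumptions: a filled-in RenderPassBeginInfo 'renderPassBeginInfo' and
  // enhanced mode enabled): chaining DeviceGroupRenderPassBeginInfo into RenderPassBeginInfo::pNext
  // restricts the render pass instance to the devices in deviceMask and can give each device its own
  // render area, e.g. splitting a 1920x1080 frame between two devices.
  //
  //   VULKAN_HPP_NAMESPACE::Rect2D renderAreas[] = { { { 0, 0 }, { 960, 1080 } },
  //                                                  { { 960, 0 }, { 960, 1080 } } };
  //   VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo deviceGroupRenderPassBegin( 0x3, renderAreas );
  //   renderPassBeginInfo.pNext = &deviceGroupRenderPassBegin;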
|
|
|
|
struct DeviceGroupSubmitInfo
|
|
{
|
|
using NativeType = VkDeviceGroupSubmitInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSubmitInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo(uint32_t waitSemaphoreCount_ = {}, const uint32_t * pWaitSemaphoreDeviceIndices_ = {}, uint32_t commandBufferCount_ = {}, const uint32_t * pCommandBufferDeviceMasks_ = {}, uint32_t signalSemaphoreCount_ = {}, const uint32_t * pSignalSemaphoreDeviceIndices_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: waitSemaphoreCount( waitSemaphoreCount_ ), pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ ), commandBufferCount( commandBufferCount_ ), pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ ), signalSemaphoreCount( signalSemaphoreCount_ ), pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceGroupSubmitInfo( *reinterpret_cast<DeviceGroupSubmitInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceGroupSubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & waitSemaphoreDeviceIndices_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & commandBufferDeviceMasks_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & signalSemaphoreDeviceIndices_ = {} )
|
|
: waitSemaphoreCount( static_cast<uint32_t>( waitSemaphoreDeviceIndices_.size() ) ), pWaitSemaphoreDeviceIndices( waitSemaphoreDeviceIndices_.data() ), commandBufferCount( static_cast<uint32_t>( commandBufferDeviceMasks_.size() ) ), pCommandBufferDeviceMasks( commandBufferDeviceMasks_.data() ), signalSemaphoreCount( static_cast<uint32_t>( signalSemaphoreDeviceIndices_.size() ) ), pSignalSemaphoreDeviceIndices( signalSemaphoreDeviceIndices_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceGroupSubmitInfo & operator=( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceGroupSubmitInfo & operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
waitSemaphoreCount = waitSemaphoreCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPWaitSemaphoreDeviceIndices( const uint32_t * pWaitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceGroupSubmitInfo & setWaitSemaphoreDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & waitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
waitSemaphoreCount = static_cast<uint32_t>( waitSemaphoreDeviceIndices_.size() );
|
|
pWaitSemaphoreDeviceIndices = waitSemaphoreDeviceIndices_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
commandBufferCount = commandBufferCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPCommandBufferDeviceMasks( const uint32_t * pCommandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceGroupSubmitInfo & setCommandBufferDeviceMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & commandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
commandBufferCount = static_cast<uint32_t>( commandBufferDeviceMasks_.size() );
|
|
pCommandBufferDeviceMasks = commandBufferDeviceMasks_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
signalSemaphoreCount = signalSemaphoreCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPSignalSemaphoreDeviceIndices( const uint32_t * pSignalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
DeviceGroupSubmitInfo & setSignalSemaphoreDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & signalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
signalSemaphoreCount = static_cast<uint32_t>( signalSemaphoreDeviceIndices_.size() );
|
|
pSignalSemaphoreDeviceIndices = signalSemaphoreDeviceIndices_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDeviceGroupSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceGroupSubmitInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceGroupSubmitInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &, uint32_t const &, const uint32_t * const &, uint32_t const &, const uint32_t * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, waitSemaphoreCount, pWaitSemaphoreDeviceIndices, commandBufferCount, pCommandBufferDeviceMasks, signalSemaphoreCount, pSignalSemaphoreDeviceIndices );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DeviceGroupSubmitInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( waitSemaphoreCount == rhs.waitSemaphoreCount )
|
|
&& ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices )
|
|
&& ( commandBufferCount == rhs.commandBufferCount )
|
|
&& ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks )
|
|
&& ( signalSemaphoreCount == rhs.signalSemaphoreCount )
|
|
&& ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSubmitInfo;
|
|
const void * pNext = {};
|
|
uint32_t waitSemaphoreCount = {};
|
|
const uint32_t * pWaitSemaphoreDeviceIndices = {};
|
|
uint32_t commandBufferCount = {};
|
|
const uint32_t * pCommandBufferDeviceMasks = {};
|
|
uint32_t signalSemaphoreCount = {};
|
|
const uint32_t * pSignalSemaphoreDeviceIndices = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo>::value, "DeviceGroupSubmitInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceGroupSubmitInfo>
|
|
{
|
|
using Type = DeviceGroupSubmitInfo;
|
|
};
|
|
using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo;
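  // Illustrative usage sketch (assumptions: a filled-in SubmitInfo 'submitInfo' with one wait semaphore,
  // one command buffer and one signal semaphore, and enhanced mode enabled): DeviceGroupSubmitInfo is
  // chained into SubmitInfo::pNext; the ArrayProxy setters fill each count/pointer pair in one call.
  //
  //   uint32_t waitSemaphoreDeviceIndices[]   = { 0 };
  //   uint32_t commandBufferDeviceMasks[]     = { 0x1 };
  //   uint32_t signalSemaphoreDeviceIndices[] = { 0 };
  //   VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo deviceGroupSubmitInfo;
  //   deviceGroupSubmitInfo.setWaitSemaphoreDeviceIndices( waitSemaphoreDeviceIndices )
  //                        .setCommandBufferDeviceMasks( commandBufferDeviceMasks )
  //                        .setSignalSemaphoreDeviceIndices( signalSemaphoreDeviceIndices );
  //   submitInfo.pNext = &deviceGroupSubmitInfo;   // element counts must match those in SubmitInfo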
|
|
|
|
struct DeviceGroupSwapchainCreateInfoKHR
|
|
{
|
|
using NativeType = VkDeviceGroupSwapchainCreateInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR(VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: modes( modes_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceGroupSwapchainCreateInfoKHR( *reinterpret_cast<DeviceGroupSwapchainCreateInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceGroupSwapchainCreateInfoKHR & operator=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceGroupSwapchainCreateInfoKHR & operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR & setModes( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
modes = modes_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDeviceGroupSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceGroupSwapchainCreateInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, modes );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DeviceGroupSwapchainCreateInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( modes == rhs.modes );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR>::value, "DeviceGroupSwapchainCreateInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceGroupSwapchainCreateInfoKHR>
|
|
{
|
|
using Type = DeviceGroupSwapchainCreateInfoKHR;
|
|
};
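  // Illustrative usage sketch (assumption: a filled-in SwapchainCreateInfoKHR 'swapchainCreateInfo'):
  // chaining DeviceGroupSwapchainCreateInfoKHR into SwapchainCreateInfoKHR::pNext selects which
  // device-group present modes the swapchain may later be used with.
  //
  //   VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR deviceGroupSwapchainCreateInfo(
  //     VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal );
  //   swapchainCreateInfo.pNext = &deviceGroupSwapchainCreateInfo;
  //   // auto swapchain = device.createSwapchainKHR( swapchainCreateInfo );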
|
|
|
|
struct ImageCreateInfo
|
|
{
|
|
using NativeType = VkImageCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageCreateInfo(VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, uint32_t mipLevels_ = {}, uint32_t arrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t * pQueueFamilyIndices_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), imageType( imageType_ ), format( format_ ), extent( extent_ ), mipLevels( mipLevels_ ), arrayLayers( arrayLayers_ ), samples( samples_ ), tiling( tiling_ ), usage( usage_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( queueFamilyIndexCount_ ), pQueueFamilyIndices( pQueueFamilyIndices_ ), initialLayout( initialLayout_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageCreateInfo( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageCreateInfo( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageCreateInfo( *reinterpret_cast<ImageCreateInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, VULKAN_HPP_NAMESPACE::ImageType imageType_, VULKAN_HPP_NAMESPACE::Format format_, VULKAN_HPP_NAMESPACE::Extent3D extent_, uint32_t mipLevels_, uint32_t arrayLayers_, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_, VULKAN_HPP_NAMESPACE::ImageTiling tiling_, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined )
|
|
: flags( flags_ ), imageType( imageType_ ), format( format_ ), extent( extent_ ), mipLevels( mipLevels_ ), arrayLayers( arrayLayers_ ), samples( samples_ ), tiling( tiling_ ), usage( usage_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() ), initialLayout( initialLayout_ )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageCreateInfo & operator=( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageCreateInfo & operator=( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setImageType( VULKAN_HPP_NAMESPACE::ImageType imageType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageType = imageType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
format = format_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
extent = extent_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setMipLevels( uint32_t mipLevels_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
mipLevels = mipLevels_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setArrayLayers( uint32_t arrayLayers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
arrayLayers = arrayLayers_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
samples = samples_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
tiling = tiling_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
usage = usage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sharingMode = sharingMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueFamilyIndexCount = queueFamilyIndexCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pQueueFamilyIndices = pQueueFamilyIndices_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ImageCreateInfo & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
|
|
pQueueFamilyIndices = queueFamilyIndices_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
initialLayout = initialLayout_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkImageCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageCreateFlags const &, VULKAN_HPP_NAMESPACE::ImageType const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::Extent3D const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &, VULKAN_HPP_NAMESPACE::ImageTiling const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &, VULKAN_HPP_NAMESPACE::SharingMode const &, uint32_t const &, const uint32_t * const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, imageType, format, extent, mipLevels, arrayLayers, samples, tiling, usage, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices, initialLayout );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( imageType == rhs.imageType )
|
|
&& ( format == rhs.format )
|
|
&& ( extent == rhs.extent )
|
|
&& ( mipLevels == rhs.mipLevels )
|
|
&& ( arrayLayers == rhs.arrayLayers )
|
|
&& ( samples == rhs.samples )
|
|
&& ( tiling == rhs.tiling )
|
|
&& ( usage == rhs.usage )
|
|
&& ( sharingMode == rhs.sharingMode )
|
|
&& ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
|
|
&& ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
|
|
&& ( initialLayout == rhs.initialLayout );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::ImageType imageType = VULKAN_HPP_NAMESPACE::ImageType::e1D;
|
|
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::Extent3D extent = {};
|
|
uint32_t mipLevels = {};
|
|
uint32_t arrayLayers = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
|
|
VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
|
|
VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
|
|
uint32_t queueFamilyIndexCount = {};
|
|
const uint32_t * pQueueFamilyIndices = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCreateInfo>::value, "ImageCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageCreateInfo>
|
|
{
|
|
using Type = ImageCreateInfo;
|
|
};
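  // Illustrative usage sketch (assumptions: an existing VULKAN_HPP_NAMESPACE::Device 'device' and the
  // default exception-based configuration): a typical 2D sampled image created from this structure.
  //
  //   VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo(
  //     {},                                                   // flags
  //     VULKAN_HPP_NAMESPACE::ImageType::e2D,
  //     VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm,
  //     VULKAN_HPP_NAMESPACE::Extent3D( 1024, 1024, 1 ),
  //     1,                                                    // mipLevels
  //     1,                                                    // arrayLayers
  //     VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
  //     VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal,
  //     VULKAN_HPP_NAMESPACE::ImageUsageFlagBits::eSampled | VULKAN_HPP_NAMESPACE::ImageUsageFlagBits::eTransferDst,
  //     VULKAN_HPP_NAMESPACE::SharingMode::eExclusive );
  //   VULKAN_HPP_NAMESPACE::Image image = device.createImage( imageCreateInfo );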
|
|
|
|
struct DeviceImageMemoryRequirements
|
|
{
|
|
using NativeType = VkDeviceImageMemoryRequirements;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceImageMemoryRequirements;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceImageMemoryRequirements(const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ = {}, VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor) VULKAN_HPP_NOEXCEPT
|
|
: pCreateInfo( pCreateInfo_ ), planeAspect( planeAspect_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceImageMemoryRequirements( DeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceImageMemoryRequirements( VkDeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceImageMemoryRequirements( *reinterpret_cast<DeviceImageMemoryRequirements const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceImageMemoryRequirements & operator=( DeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceImageMemoryRequirements & operator=( VkDeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPCreateInfo( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pCreateInfo = pCreateInfo_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
planeAspect = planeAspect_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDeviceImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceImageMemoryRequirements*>( this );
|
|
}
|
|
|
|
explicit operator VkDeviceImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceImageMemoryRequirements*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const VULKAN_HPP_NAMESPACE::ImageCreateInfo * const &, VULKAN_HPP_NAMESPACE::ImageAspectFlagBits const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pCreateInfo, planeAspect );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DeviceImageMemoryRequirements const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( pCreateInfo == rhs.pCreateInfo )
|
|
&& ( planeAspect == rhs.planeAspect );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DeviceImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceImageMemoryRequirements;
|
|
const void * pNext = {};
|
|
const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo = {};
|
|
VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements ) == sizeof( VkDeviceImageMemoryRequirements ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements>::value, "DeviceImageMemoryRequirements is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceImageMemoryRequirements>
|
|
{
|
|
using Type = DeviceImageMemoryRequirements;
|
|
};
|
|
using DeviceImageMemoryRequirementsKHR = DeviceImageMemoryRequirements;
|
|
|
|
struct DeviceMemoryOpaqueCaptureAddressInfo
|
|
{
|
|
using NativeType = VkDeviceMemoryOpaqueCaptureAddressInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: memory( memory_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceMemoryOpaqueCaptureAddressInfo( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceMemoryOpaqueCaptureAddressInfo( *reinterpret_cast<DeviceMemoryOpaqueCaptureAddressInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceMemoryOpaqueCaptureAddressInfo & operator=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceMemoryOpaqueCaptureAddressInfo & operator=( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memory = memory_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDeviceMemoryOpaqueCaptureAddressInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkDeviceMemoryOpaqueCaptureAddressInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceMemoryOpaqueCaptureAddressInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, memory );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DeviceMemoryOpaqueCaptureAddressInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( memory == rhs.memory );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo ) == sizeof( VkDeviceMemoryOpaqueCaptureAddressInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo>::value, "DeviceMemoryOpaqueCaptureAddressInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceMemoryOpaqueCaptureAddressInfo>
|
|
{
|
|
using Type = DeviceMemoryOpaqueCaptureAddressInfo;
|
|
};
|
|
using DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo;
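  // Illustrative usage sketch (assumption: 'memory' is a DeviceMemory allocated with
  // MemoryAllocateFlagBits::eDeviceAddressCaptureReplay): the structure identifies an allocation for
  // vkGetDeviceMemoryOpaqueCaptureAddress, whose result can be replayed in a later run through
  // MemoryOpaqueCaptureAddressAllocateInfo.
  //
  //   VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo addressInfo( memory );
  //   uint64_t opaqueAddress = device.getMemoryOpaqueCaptureAddress( addressInfo );  // method name assumed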
|
|
|
|
struct DeviceMemoryOverallocationCreateInfoAMD
|
|
{
|
|
using NativeType = VkDeviceMemoryOverallocationCreateInfoAMD;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD(VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault) VULKAN_HPP_NOEXCEPT
|
|
: overallocationBehavior( overallocationBehavior_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceMemoryOverallocationCreateInfoAMD( *reinterpret_cast<DeviceMemoryOverallocationCreateInfoAMD const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceMemoryOverallocationCreateInfoAMD & operator=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceMemoryOverallocationCreateInfoAMD & operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD & setOverallocationBehavior( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
overallocationBehavior = overallocationBehavior_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDeviceMemoryOverallocationCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceMemoryOverallocationCreateInfoAMD*>( this );
|
|
}
|
|
|
|
explicit operator VkDeviceMemoryOverallocationCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceMemoryOverallocationCreateInfoAMD*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, overallocationBehavior );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DeviceMemoryOverallocationCreateInfoAMD const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( overallocationBehavior == rhs.overallocationBehavior );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD ) == sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD>::value, "DeviceMemoryOverallocationCreateInfoAMD is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceMemoryOverallocationCreateInfoAMD>
|
|
{
|
|
using Type = DeviceMemoryOverallocationCreateInfoAMD;
|
|
};
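
  // Usage sketch (illustrative comment, VK_AMD_memory_overallocation_behavior): opting a
  // device out of driver memory overallocation by chaining this struct into the pNext
  // chain of vk::DeviceCreateInfo; the surrounding create-info setup is assumed.
  //
  //   vk::DeviceMemoryOverallocationCreateInfoAMD overallocation{ vk::MemoryOverallocationBehaviorAMD::eDisallowed };
  //   vk::DeviceCreateInfo deviceCreateInfo{ /* flags, queue create infos, extensions ... */ };
  //   deviceCreateInfo.setPNext( &overallocation );
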
struct DeviceMemoryReportCallbackDataEXT
|
|
{
|
|
using NativeType = VkDeviceMemoryReportCallbackDataEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryReportCallbackDataEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT(VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type_ = VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate, uint64_t memoryObjectId_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, uint64_t objectHandle_ = {}, uint32_t heapIndex_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), type( type_ ), memoryObjectId( memoryObjectId_ ), size( size_ ), objectType( objectType_ ), objectHandle( objectHandle_ ), heapIndex( heapIndex_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceMemoryReportCallbackDataEXT( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceMemoryReportCallbackDataEXT( *reinterpret_cast<DeviceMemoryReportCallbackDataEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceMemoryReportCallbackDataEXT & operator=( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceMemoryReportCallbackDataEXT & operator=( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkDeviceMemoryReportCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceMemoryReportCallbackDataEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkDeviceMemoryReportCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceMemoryReportCallbackDataEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT const &, VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT const &, uint64_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::ObjectType const &, uint64_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, type, memoryObjectId, size, objectType, objectHandle, heapIndex );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DeviceMemoryReportCallbackDataEXT const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( type == rhs.type )
|
|
&& ( memoryObjectId == rhs.memoryObjectId )
|
|
&& ( size == rhs.size )
|
|
&& ( objectType == rhs.objectType )
|
|
&& ( objectHandle == rhs.objectHandle )
|
|
&& ( heapIndex == rhs.heapIndex );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryReportCallbackDataEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type = VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate;
|
|
uint64_t memoryObjectId = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
|
|
VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
|
|
uint64_t objectHandle = {};
|
|
uint32_t heapIndex = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT ) == sizeof( VkDeviceMemoryReportCallbackDataEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT>::value, "DeviceMemoryReportCallbackDataEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceMemoryReportCallbackDataEXT>
|
|
{
|
|
using Type = DeviceMemoryReportCallbackDataEXT;
|
|
};
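
  // Note (illustrative): unlike most *Info wrappers, this struct has no setters because it
  // is filled in by the implementation and handed to the application callback registered
  // with VK_EXT_device_memory_report (DeviceDeviceMemoryReportCreateInfoEXT in the
  // DeviceCreateInfo pNext chain). A callback might read it roughly like this
  // (calling-convention macros omitted for brevity):
  //
  //   void memoryReportCallback( const VkDeviceMemoryReportCallbackDataEXT * pData, void * /*pUserData*/ )
  //   {
  //     auto const & data = *reinterpret_cast<const vk::DeviceMemoryReportCallbackDataEXT *>( pData );
  //     if ( data.type == vk::DeviceMemoryReportEventTypeEXT::eAllocate )
  //     {
  //       // record data.memoryObjectId, data.size and data.heapIndex
  //     }
  //   }
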
struct DevicePrivateDataCreateInfo
|
|
{
|
|
using NativeType = VkDevicePrivateDataCreateInfo;
|
|
|
|
static const bool allowDuplicate = true;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDevicePrivateDataCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfo(uint32_t privateDataSlotRequestCount_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: privateDataSlotRequestCount( privateDataSlotRequestCount_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfo( DevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DevicePrivateDataCreateInfo( VkDevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DevicePrivateDataCreateInfo( *reinterpret_cast<DevicePrivateDataCreateInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DevicePrivateDataCreateInfo & operator=( DevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DevicePrivateDataCreateInfo & operator=( VkDevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo & setPrivateDataSlotRequestCount( uint32_t privateDataSlotRequestCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
privateDataSlotRequestCount = privateDataSlotRequestCount_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDevicePrivateDataCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDevicePrivateDataCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkDevicePrivateDataCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDevicePrivateDataCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, privateDataSlotRequestCount );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DevicePrivateDataCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( DevicePrivateDataCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( privateDataSlotRequestCount == rhs.privateDataSlotRequestCount );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DevicePrivateDataCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDevicePrivateDataCreateInfo;
|
|
const void * pNext = {};
|
|
uint32_t privateDataSlotRequestCount = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo ) == sizeof( VkDevicePrivateDataCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo>::value, "DevicePrivateDataCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDevicePrivateDataCreateInfo>
|
|
{
|
|
using Type = DevicePrivateDataCreateInfo;
|
|
};
|
|
using DevicePrivateDataCreateInfoEXT = DevicePrivateDataCreateInfo;
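
  // Usage sketch (illustrative, core in Vulkan 1.3 / VK_EXT_private_data): asking the
  // implementation to reserve private data slots up front by chaining this struct into
  // the vk::DeviceCreateInfo pNext chain.
  //
  //   vk::DevicePrivateDataCreateInfo privateData{ 2 /* privateDataSlotRequestCount */ };
  //   vk::DeviceCreateInfo deviceCreateInfo{ /* ... */ };
  //   deviceCreateInfo.setPNext( &privateData );
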
struct DeviceQueueGlobalPriorityCreateInfoKHR
|
|
{
|
|
using NativeType = VkDeviceQueueGlobalPriorityCreateInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoKHR(VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR globalPriority_ = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow) VULKAN_HPP_NOEXCEPT
|
|
: globalPriority( globalPriority_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoKHR( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceQueueGlobalPriorityCreateInfoKHR( VkDeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceQueueGlobalPriorityCreateInfoKHR( *reinterpret_cast<DeviceQueueGlobalPriorityCreateInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceQueueGlobalPriorityCreateInfoKHR & operator=( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceQueueGlobalPriorityCreateInfoKHR & operator=( VkDeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfoKHR & setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR globalPriority_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
globalPriority = globalPriority_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDeviceQueueGlobalPriorityCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceQueueGlobalPriorityCreateInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkDeviceQueueGlobalPriorityCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceQueueGlobalPriorityCreateInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, globalPriority );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DeviceQueueGlobalPriorityCreateInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( globalPriority == rhs.globalPriority );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR globalPriority = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR>::value, "DeviceQueueGlobalPriorityCreateInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR>
|
|
{
|
|
using Type = DeviceQueueGlobalPriorityCreateInfoKHR;
|
|
};
|
|
using DeviceQueueGlobalPriorityCreateInfoEXT = DeviceQueueGlobalPriorityCreateInfoKHR;
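
  // Usage sketch (illustrative, VK_KHR_global_priority): requesting a high global priority
  // for a queue by chaining this struct into the pNext of the matching
  // vk::DeviceQueueCreateInfo before device creation.
  //
  //   vk::DeviceQueueGlobalPriorityCreateInfoKHR globalPriority{ vk::QueueGlobalPriorityKHR::eHigh };
  //   float queuePriority = 1.0f;
  //   vk::DeviceQueueCreateInfo queueCreateInfo{ {}, /*queueFamilyIndex*/ 0, /*queueCount*/ 1, &queuePriority };
  //   queueCreateInfo.setPNext( &globalPriority );
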
struct DeviceQueueInfo2
|
|
{
|
|
using NativeType = VkDeviceQueueInfo2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueInfo2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DeviceQueueInfo2(VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}, uint32_t queueIndex_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ ), queueIndex( queueIndex_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DeviceQueueInfo2( *reinterpret_cast<DeviceQueueInfo2 const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DeviceQueueInfo2 & operator=( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DeviceQueueInfo2 & operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueFamilyIndex = queueFamilyIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setQueueIndex( uint32_t queueIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueIndex = queueIndex_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDeviceQueueInfo2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDeviceQueueInfo2*>( this );
|
|
}
|
|
|
|
explicit operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDeviceQueueInfo2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags const &, uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, queueFamilyIndex, queueIndex );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DeviceQueueInfo2 const & ) const = default;
|
|
#else
|
|
bool operator==( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( queueFamilyIndex == rhs.queueFamilyIndex )
|
|
&& ( queueIndex == rhs.queueIndex );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueInfo2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {};
|
|
uint32_t queueFamilyIndex = {};
|
|
uint32_t queueIndex = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2>::value, "DeviceQueueInfo2 is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDeviceQueueInfo2>
|
|
{
|
|
using Type = DeviceQueueInfo2;
|
|
};
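
  // Usage sketch (illustrative): retrieving a queue that was created with non-default
  // create flags (for example a protected-capable queue) through vkGetDeviceQueue2;
  // `device` is assumed to have been created with a matching DeviceQueueCreateInfo.
  //
  //   vk::DeviceQueueInfo2 queueInfo{ vk::DeviceQueueCreateFlagBits::eProtected, /*queueFamilyIndex*/ 0, /*queueIndex*/ 0 };
  //   vk::Queue queue = device.getQueue2( queueInfo );
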
#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
|
|
struct DirectFBSurfaceCreateInfoEXT
|
|
{
|
|
using NativeType = VkDirectFBSurfaceCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectfbSurfaceCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT(VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ = {}, IDirectFB * dfb_ = {}, IDirectFBSurface * surface_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), dfb( dfb_ ), surface( surface_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DirectFBSurfaceCreateInfoEXT( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DirectFBSurfaceCreateInfoEXT( *reinterpret_cast<DirectFBSurfaceCreateInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DirectFBSurfaceCreateInfoEXT & operator=( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DirectFBSurfaceCreateInfoEXT & operator=( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setDfb( IDirectFB * dfb_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dfb = dfb_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setSurface( IDirectFBSurface * surface_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
surface = surface_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDirectFBSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkDirectFBSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDirectFBSurfaceCreateInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT const &, IDirectFB * const &, IDirectFBSurface * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, dfb, surface );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DirectFBSurfaceCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( DirectFBSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( dfb == rhs.dfb )
|
|
&& ( surface == rhs.surface );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DirectFBSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDirectfbSurfaceCreateInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags = {};
|
|
IDirectFB * dfb = {};
|
|
IDirectFBSurface * surface = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT ) == sizeof( VkDirectFBSurfaceCreateInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT>::value, "DirectFBSurfaceCreateInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDirectfbSurfaceCreateInfoEXT>
|
|
{
|
|
using Type = DirectFBSurfaceCreateInfoEXT;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
|
|
|
|
struct DispatchIndirectCommand
|
|
{
|
|
using NativeType = VkDispatchIndirectCommand;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DispatchIndirectCommand(uint32_t x_ = {}, uint32_t y_ = {}, uint32_t z_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: x( x_ ), y( y_ ), z( z_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DispatchIndirectCommand( *reinterpret_cast<DispatchIndirectCommand const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DispatchIndirectCommand & operator=( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DispatchIndirectCommand & operator=( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setX( uint32_t x_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
x = x_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setY( uint32_t y_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
y = y_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setZ( uint32_t z_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
z = z_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDispatchIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDispatchIndirectCommand*>( this );
|
|
}
|
|
|
|
explicit operator VkDispatchIndirectCommand &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDispatchIndirectCommand*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( x, y, z );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DispatchIndirectCommand const & ) const = default;
|
|
#else
|
|
bool operator==( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( x == rhs.x )
|
|
&& ( y == rhs.y )
|
|
&& ( z == rhs.z );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t x = {};
|
|
uint32_t y = {};
|
|
uint32_t z = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand>::value, "DispatchIndirectCommand is not nothrow_move_constructible!" );
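
  // Usage sketch (illustrative): DispatchIndirectCommand matches the three uint32_t
  // workgroup counts that vkCmdDispatchIndirect reads from a buffer, so it can be used to
  // fill such a buffer from the host. `mappedPtr` (pointing at mapped, host-visible buffer
  // memory), `buffer` and `commandBuffer` are assumptions of the sketch.
  //
  //   vk::DispatchIndirectCommand dispatch{ /*x*/ 64, /*y*/ 1, /*z*/ 1 };
  //   std::memcpy( mappedPtr, &dispatch, sizeof( dispatch ) );   // needs <cstring>
  //   commandBuffer.dispatchIndirect( buffer, /*offset*/ 0 );
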
struct DisplayEventInfoEXT
|
|
{
|
|
using NativeType = VkDisplayEventInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayEventInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT(VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut) VULKAN_HPP_NOEXCEPT
|
|
: displayEvent( displayEvent_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DisplayEventInfoEXT( *reinterpret_cast<DisplayEventInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DisplayEventInfoEXT & operator=( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayEventInfoEXT & operator=( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT & setDisplayEvent( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
displayEvent = displayEvent_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDisplayEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDisplayEventInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkDisplayEventInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDisplayEventInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, displayEvent );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DisplayEventInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( displayEvent == rhs.displayEvent );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayEventInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT>::value, "DisplayEventInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDisplayEventInfoEXT>
|
|
{
|
|
using Type = DisplayEventInfoEXT;
|
|
};
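
  // Usage sketch (illustrative, VK_EXT_display_control; the exact wrapper signature is
  // hedged): requesting a fence that is signaled on the display's first-pixel-out event.
  //
  //   vk::DisplayEventInfoEXT eventInfo{ vk::DisplayEventTypeEXT::eFirstPixelOut };
  //   vk::Fence fence = device.registerDisplayEventEXT( display, eventInfo );
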
struct DisplayModeParametersKHR
|
|
{
|
|
using NativeType = VkDisplayModeParametersKHR;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR(VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ = {}, uint32_t refreshRate_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: visibleRegion( visibleRegion_ ), refreshRate( refreshRate_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DisplayModeParametersKHR( *reinterpret_cast<DisplayModeParametersKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DisplayModeParametersKHR & operator=( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayModeParametersKHR & operator=( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR & setVisibleRegion( VULKAN_HPP_NAMESPACE::Extent2D const & visibleRegion_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
visibleRegion = visibleRegion_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR & setRefreshRate( uint32_t refreshRate_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
refreshRate = refreshRate_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDisplayModeParametersKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDisplayModeParametersKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkDisplayModeParametersKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDisplayModeParametersKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( visibleRegion, refreshRate );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DisplayModeParametersKHR const & ) const = default;
|
|
#else
|
|
bool operator==( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( visibleRegion == rhs.visibleRegion )
|
|
&& ( refreshRate == rhs.refreshRate );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::Extent2D visibleRegion = {};
|
|
uint32_t refreshRate = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR>::value, "DisplayModeParametersKHR is not nothrow_move_constructible!" );
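
  // Note (illustrative): refreshRate is expressed in millihertz (refresh rate in Hz
  // multiplied by 1000), so a 1920x1080 mode at 59.94 Hz would be described as
  //
  //   vk::DisplayModeParametersKHR parameters{ vk::Extent2D{ 1920, 1080 }, 59940 };
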
struct DisplayModeCreateInfoKHR
|
|
{
|
|
using NativeType = VkDisplayModeCreateInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeCreateInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR(VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), parameters( parameters_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DisplayModeCreateInfoKHR( *reinterpret_cast<DisplayModeCreateInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DisplayModeCreateInfoKHR & operator=( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayModeCreateInfoKHR & operator=( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setParameters( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const & parameters_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
parameters = parameters_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDisplayModeCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkDisplayModeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDisplayModeCreateInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR const &, VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, parameters );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DisplayModeCreateInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( parameters == rhs.parameters );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeCreateInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags = {};
|
|
VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR>::value, "DisplayModeCreateInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDisplayModeCreateInfoKHR>
|
|
{
|
|
using Type = DisplayModeCreateInfoKHR;
|
|
};
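
  // Usage sketch (illustrative, VK_KHR_display): requesting a custom mode on a display;
  // `physicalDevice` and `display` are assumed to exist.
  //
  //   vk::DisplayModeCreateInfoKHR modeCreateInfo{ {}, vk::DisplayModeParametersKHR{ vk::Extent2D{ 1920, 1080 }, 60000 } };
  //   vk::DisplayModeKHR mode = physicalDevice.createDisplayModeKHR( display, modeCreateInfo );
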
struct DisplayModePropertiesKHR
|
|
{
|
|
using NativeType = VkDisplayModePropertiesKHR;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: displayMode( displayMode_ ), parameters( parameters_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayModePropertiesKHR( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DisplayModePropertiesKHR( *reinterpret_cast<DisplayModePropertiesKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DisplayModePropertiesKHR & operator=( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayModePropertiesKHR & operator=( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkDisplayModePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDisplayModePropertiesKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkDisplayModePropertiesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDisplayModePropertiesKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::DisplayModeKHR const &, VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( displayMode, parameters );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DisplayModePropertiesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( displayMode == rhs.displayMode )
|
|
&& ( parameters == rhs.parameters );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {};
|
|
VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR>::value, "DisplayModePropertiesKHR is not nothrow_move_constructible!" );
|
|
|
|
struct DisplayModeProperties2KHR
|
|
{
|
|
using NativeType = VkDisplayModeProperties2KHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeProperties2KHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: displayModeProperties( displayModeProperties_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayModeProperties2KHR( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DisplayModeProperties2KHR( *reinterpret_cast<DisplayModeProperties2KHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DisplayModeProperties2KHR & operator=( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayModeProperties2KHR & operator=( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkDisplayModeProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDisplayModeProperties2KHR*>( this );
|
|
}
|
|
|
|
explicit operator VkDisplayModeProperties2KHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDisplayModeProperties2KHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, displayModeProperties );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DisplayModeProperties2KHR const & ) const = default;
|
|
#else
|
|
bool operator==( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( displayModeProperties == rhs.displayModeProperties );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeProperties2KHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR ) == sizeof( VkDisplayModeProperties2KHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>::value, "DisplayModeProperties2KHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDisplayModeProperties2KHR>
|
|
{
|
|
using Type = DisplayModeProperties2KHR;
|
|
};
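
  // Usage sketch (illustrative, VK_KHR_get_display_properties2; exact wrapper return type
  // hedged): enumerating the modes of a display through the *2KHR query.
  //
  //   std::vector<vk::DisplayModeProperties2KHR> modes = physicalDevice.getDisplayModeProperties2KHR( display );
  //   for ( auto const & m : modes )
  //   {
  //     // inspect m.displayModeProperties.parameters.visibleRegion / .refreshRate
  //   }
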
struct DisplayNativeHdrSurfaceCapabilitiesAMD
|
|
{
|
|
using NativeType = VkDisplayNativeHdrSurfaceCapabilitiesAMD;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD(VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: localDimmingSupport( localDimmingSupport_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayNativeHdrSurfaceCapabilitiesAMD( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DisplayNativeHdrSurfaceCapabilitiesAMD( *reinterpret_cast<DisplayNativeHdrSurfaceCapabilitiesAMD const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDisplayNativeHdrSurfaceCapabilitiesAMD*>( this );
|
|
}
|
|
|
|
explicit operator VkDisplayNativeHdrSurfaceCapabilitiesAMD &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDisplayNativeHdrSurfaceCapabilitiesAMD*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, localDimmingSupport );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DisplayNativeHdrSurfaceCapabilitiesAMD const & ) const = default;
|
|
#else
|
|
bool operator==( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( localDimmingSupport == rhs.localDimmingSupport );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD ) == sizeof( VkDisplayNativeHdrSurfaceCapabilitiesAMD ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD>::value, "DisplayNativeHdrSurfaceCapabilitiesAMD is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD>
|
|
{
|
|
using Type = DisplayNativeHdrSurfaceCapabilitiesAMD;
|
|
};
|
|
|
|
struct DisplayPlaneCapabilitiesKHR
|
|
{
|
|
using NativeType = VkDisplayPlaneCapabilitiesKHR;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR(VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {}, VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition_ = {}, VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent_ = {}, VULKAN_HPP_NAMESPACE::Offset2D minDstPosition_ = {}, VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minDstExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: supportedAlpha( supportedAlpha_ ), minSrcPosition( minSrcPosition_ ), maxSrcPosition( maxSrcPosition_ ), minSrcExtent( minSrcExtent_ ), maxSrcExtent( maxSrcExtent_ ), minDstPosition( minDstPosition_ ), maxDstPosition( maxDstPosition_ ), minDstExtent( minDstExtent_ ), maxDstExtent( maxDstExtent_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayPlaneCapabilitiesKHR( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DisplayPlaneCapabilitiesKHR( *reinterpret_cast<DisplayPlaneCapabilitiesKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DisplayPlaneCapabilitiesKHR & operator=( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayPlaneCapabilitiesKHR & operator=( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkDisplayPlaneCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDisplayPlaneCapabilitiesKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkDisplayPlaneCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR const &, VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( supportedAlpha, minSrcPosition, maxSrcPosition, minSrcExtent, maxSrcExtent, minDstPosition, maxDstPosition, minDstExtent, maxDstExtent );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DisplayPlaneCapabilitiesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( supportedAlpha == rhs.supportedAlpha )
|
|
&& ( minSrcPosition == rhs.minSrcPosition )
|
|
&& ( maxSrcPosition == rhs.maxSrcPosition )
|
|
&& ( minSrcExtent == rhs.minSrcExtent )
|
|
&& ( maxSrcExtent == rhs.maxSrcExtent )
|
|
&& ( minDstPosition == rhs.minDstPosition )
|
|
&& ( maxDstPosition == rhs.maxDstPosition )
|
|
&& ( minDstExtent == rhs.minDstExtent )
|
|
&& ( maxDstExtent == rhs.maxDstExtent );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha = {};
|
|
VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition = {};
|
|
VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent = {};
|
|
VULKAN_HPP_NAMESPACE::Offset2D minDstPosition = {};
|
|
VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D minDstExtent = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::value, "DisplayPlaneCapabilitiesKHR is not nothrow_move_constructible!" );
|
|
|
|
  struct DisplayPlaneCapabilities2KHR
  {
    using NativeType = VkDisplayPlaneCapabilities2KHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneCapabilities2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR(VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = {}) VULKAN_HPP_NOEXCEPT
    : capabilities( capabilities_ )
    {}

    VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayPlaneCapabilities2KHR( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayPlaneCapabilities2KHR( *reinterpret_cast<DisplayPlaneCapabilities2KHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    DisplayPlaneCapabilities2KHR & operator=( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayPlaneCapabilities2KHR & operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR const *>( &rhs );
      return *this;
    }

    explicit operator VkDisplayPlaneCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayPlaneCapabilities2KHR*>( this );
    }

    explicit operator VkDisplayPlaneCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayPlaneCapabilities2KHR*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, capabilities );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( DisplayPlaneCapabilities2KHR const & ) const = default;
#else
    bool operator==( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( capabilities == rhs.capabilities );
#endif
    }

    bool operator!=( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR ) == sizeof( VkDisplayPlaneCapabilities2KHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::value, "DisplayPlaneCapabilities2KHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eDisplayPlaneCapabilities2KHR>
  {
    using Type = DisplayPlaneCapabilities2KHR;
  };

  struct DisplayPlaneInfo2KHR
  {
    using NativeType = VkDisplayPlaneInfo2KHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneInfo2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR(VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ = {}, uint32_t planeIndex_ = {}) VULKAN_HPP_NOEXCEPT
    : mode( mode_ ), planeIndex( planeIndex_ )
    {}

    VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayPlaneInfo2KHR( *reinterpret_cast<DisplayPlaneInfo2KHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    DisplayPlaneInfo2KHR & operator=( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayPlaneInfo2KHR & operator=( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
    {
      mode = mode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      planeIndex = planeIndex_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkDisplayPlaneInfo2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayPlaneInfo2KHR*>( this );
    }

    explicit operator VkDisplayPlaneInfo2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayPlaneInfo2KHR*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DisplayModeKHR const &, uint32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, mode, planeIndex );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( DisplayPlaneInfo2KHR const & ) const = default;
#else
    bool operator==( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( mode == rhs.mode )
          && ( planeIndex == rhs.planeIndex );
#endif
    }

    bool operator!=( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneInfo2KHR;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::DisplayModeKHR mode = {};
    uint32_t planeIndex = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR>::value, "DisplayPlaneInfo2KHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eDisplayPlaneInfo2KHR>
  {
    using Type = DisplayPlaneInfo2KHR;
  };

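  // Editorial note (not generated from the registry): DisplayPlaneInfo2KHR is the input to
  // vk::PhysicalDevice::getDisplayPlaneCapabilities2KHR (VK_KHR_get_display_properties2), which wraps
  // the same query as getDisplayPlaneCapabilitiesKHR in extensible structures. A minimal sketch,
  // assuming `physicalDevice` and `mode` as in the earlier note:
  //
  //   auto planeInfo = vk::DisplayPlaneInfo2KHR{}.setMode( mode ).setPlaneIndex( 0 );
  //   vk::DisplayPlaneCapabilities2KHR caps2 = physicalDevice.getDisplayPlaneCapabilities2KHR( planeInfo );
  //   vk::Extent2D maxDst = caps2.capabilities.maxDstExtent;
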
struct DisplayPlanePropertiesKHR
|
|
{
|
|
using NativeType = VkDisplayPlanePropertiesKHR;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay_ = {}, uint32_t currentStackIndex_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: currentDisplay( currentDisplay_ ), currentStackIndex( currentStackIndex_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayPlanePropertiesKHR( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DisplayPlanePropertiesKHR( *reinterpret_cast<DisplayPlanePropertiesKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DisplayPlanePropertiesKHR & operator=( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayPlanePropertiesKHR & operator=( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkDisplayPlanePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDisplayPlanePropertiesKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkDisplayPlanePropertiesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDisplayPlanePropertiesKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::DisplayKHR const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( currentDisplay, currentStackIndex );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DisplayPlanePropertiesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( currentDisplay == rhs.currentDisplay )
|
|
&& ( currentStackIndex == rhs.currentStackIndex );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay = {};
|
|
uint32_t currentStackIndex = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR>::value, "DisplayPlanePropertiesKHR is not nothrow_move_constructible!" );
|
|
|
|
struct DisplayPlaneProperties2KHR
|
|
{
|
|
using NativeType = VkDisplayPlaneProperties2KHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneProperties2KHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: displayPlaneProperties( displayPlaneProperties_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayPlaneProperties2KHR( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DisplayPlaneProperties2KHR( *reinterpret_cast<DisplayPlaneProperties2KHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DisplayPlaneProperties2KHR & operator=( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayPlaneProperties2KHR & operator=( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkDisplayPlaneProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDisplayPlaneProperties2KHR*>( this );
|
|
}
|
|
|
|
explicit operator VkDisplayPlaneProperties2KHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDisplayPlaneProperties2KHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, displayPlaneProperties );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DisplayPlaneProperties2KHR const & ) const = default;
|
|
#else
|
|
bool operator==( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( displayPlaneProperties == rhs.displayPlaneProperties );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneProperties2KHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR ) == sizeof( VkDisplayPlaneProperties2KHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>::value, "DisplayPlaneProperties2KHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDisplayPlaneProperties2KHR>
|
|
{
|
|
using Type = DisplayPlaneProperties2KHR;
|
|
};
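  // Editorial note (not generated from the registry): DisplayPlaneProperties2KHR is returned in bulk
  // by vk::PhysicalDevice::getDisplayPlaneProperties2KHR. A minimal sketch, assuming `physicalDevice`
  // is a vk::PhysicalDevice and at least one display plane exists:
  //
  //   std::vector<vk::DisplayPlaneProperties2KHR> planes = physicalDevice.getDisplayPlaneProperties2KHR();
  //   uint32_t stackIndex = planes[0].displayPlaneProperties.currentStackIndex;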
|
|
|
|
struct DisplayPowerInfoEXT
|
|
{
|
|
using NativeType = VkDisplayPowerInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPowerInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT(VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff) VULKAN_HPP_NOEXCEPT
|
|
: powerState( powerState_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DisplayPowerInfoEXT( *reinterpret_cast<DisplayPowerInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DisplayPowerInfoEXT & operator=( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayPowerInfoEXT & operator=( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT & setPowerState( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
powerState = powerState_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDisplayPowerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDisplayPowerInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkDisplayPowerInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDisplayPowerInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, powerState );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DisplayPowerInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( powerState == rhs.powerState );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPowerInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT>::value, "DisplayPowerInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDisplayPowerInfoEXT>
|
|
{
|
|
using Type = DisplayPowerInfoEXT;
|
|
};
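  // Editorial note (not generated from the registry): DisplayPowerInfoEXT is consumed by
  // vk::Device::displayPowerControlEXT (VK_EXT_display_control). A minimal sketch, assuming `device`
  // is a vk::Device and `display` is a vk::DisplayKHR that the device is allowed to control:
  //
  //   device.displayPowerControlEXT( display, vk::DisplayPowerInfoEXT{ vk::DisplayPowerStateEXT::eSuspend } );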
|
|
|
|
struct DisplayPresentInfoKHR
|
|
{
|
|
using NativeType = VkDisplayPresentInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPresentInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR(VULKAN_HPP_NAMESPACE::Rect2D srcRect_ = {}, VULKAN_HPP_NAMESPACE::Rect2D dstRect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 persistent_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: srcRect( srcRect_ ), dstRect( dstRect_ ), persistent( persistent_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DisplayPresentInfoKHR( *reinterpret_cast<DisplayPresentInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DisplayPresentInfoKHR & operator=( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayPresentInfoKHR & operator=( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setSrcRect( VULKAN_HPP_NAMESPACE::Rect2D const & srcRect_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcRect = srcRect_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setDstRect( VULKAN_HPP_NAMESPACE::Rect2D const & dstRect_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstRect = dstRect_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setPersistent( VULKAN_HPP_NAMESPACE::Bool32 persistent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
persistent = persistent_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDisplayPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDisplayPresentInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkDisplayPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDisplayPresentInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Rect2D const &, VULKAN_HPP_NAMESPACE::Rect2D const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcRect, dstRect, persistent );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DisplayPresentInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( srcRect == rhs.srcRect )
|
|
&& ( dstRect == rhs.dstRect )
|
|
&& ( persistent == rhs.persistent );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPresentInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Rect2D srcRect = {};
|
|
VULKAN_HPP_NAMESPACE::Rect2D dstRect = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 persistent = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR>::value, "DisplayPresentInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDisplayPresentInfoKHR>
|
|
{
|
|
using Type = DisplayPresentInfoKHR;
|
|
};
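  // Editorial note (not generated from the registry): DisplayPresentInfoKHR extends vk::PresentInfoKHR
  // through its pNext chain (VK_KHR_display_swapchain) to request a scaled or persistent present.
  // A minimal sketch, assuming `queue`, `srcRect` and `dstRect` are set up by the caller:
  //
  //   vk::DisplayPresentInfoKHR displayPresentInfo{ srcRect, dstRect, VK_TRUE /*persistent*/ };
  //   vk::PresentInfoKHR presentInfo{};          // set wait semaphores, swapchains and image indices as usual
  //   presentInfo.pNext = &displayPresentInfo;
  //   queue.presentKHR( presentInfo );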
|
|
|
|
struct DisplayPropertiesKHR
|
|
{
|
|
using NativeType = VkDisplayPropertiesKHR;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display_ = {}, const char * displayName_ = {}, VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions_ = {}, VULKAN_HPP_NAMESPACE::Extent2D physicalResolution_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible_ = {}, VULKAN_HPP_NAMESPACE::Bool32 persistentContent_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: display( display_ ), displayName( displayName_ ), physicalDimensions( physicalDimensions_ ), physicalResolution( physicalResolution_ ), supportedTransforms( supportedTransforms_ ), planeReorderPossible( planeReorderPossible_ ), persistentContent( persistentContent_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayPropertiesKHR( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DisplayPropertiesKHR( *reinterpret_cast<DisplayPropertiesKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DisplayPropertiesKHR & operator=( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayPropertiesKHR & operator=( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkDisplayPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDisplayPropertiesKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkDisplayPropertiesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDisplayPropertiesKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::DisplayKHR const &, const char * const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( display, displayName, physicalDimensions, physicalResolution, supportedTransforms, planeReorderPossible, persistentContent );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
std::strong_ordering operator<=>( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = display <=> rhs.display; cmp != 0 ) return cmp;
|
|
if ( displayName != rhs.displayName )
|
|
if ( auto cmp = strcmp( displayName, rhs.displayName ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
if ( auto cmp = physicalDimensions <=> rhs.physicalDimensions; cmp != 0 ) return cmp;
|
|
if ( auto cmp = physicalResolution <=> rhs.physicalResolution; cmp != 0 ) return cmp;
|
|
if ( auto cmp = supportedTransforms <=> rhs.supportedTransforms; cmp != 0 ) return cmp;
|
|
if ( auto cmp = planeReorderPossible <=> rhs.planeReorderPossible; cmp != 0 ) return cmp;
|
|
if ( auto cmp = persistentContent <=> rhs.persistentContent; cmp != 0 ) return cmp;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( display == rhs.display )
|
|
&& ( ( displayName == rhs.displayName ) || ( strcmp( displayName, rhs.displayName ) == 0 ) )
|
|
&& ( physicalDimensions == rhs.physicalDimensions )
|
|
&& ( physicalResolution == rhs.physicalResolution )
|
|
&& ( supportedTransforms == rhs.supportedTransforms )
|
|
&& ( planeReorderPossible == rhs.planeReorderPossible )
|
|
&& ( persistentContent == rhs.persistentContent );
|
|
}
|
|
|
|
bool operator!=( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::DisplayKHR display = {};
|
|
const char * displayName = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D physicalResolution = {};
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 persistentContent = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>::value, "DisplayPropertiesKHR is not nothrow_move_constructible!" );
|
|
|
|
struct DisplayProperties2KHR
|
|
{
|
|
using NativeType = VkDisplayProperties2KHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayProperties2KHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DisplayProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: displayProperties( displayProperties_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayProperties2KHR( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DisplayProperties2KHR( *reinterpret_cast<DisplayProperties2KHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DisplayProperties2KHR & operator=( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplayProperties2KHR & operator=( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkDisplayProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDisplayProperties2KHR*>( this );
|
|
}
|
|
|
|
explicit operator VkDisplayProperties2KHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDisplayProperties2KHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, displayProperties );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DisplayProperties2KHR const & ) const = default;
|
|
#else
|
|
bool operator==( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( displayProperties == rhs.displayProperties );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayProperties2KHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>::value, "DisplayProperties2KHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDisplayProperties2KHR>
|
|
{
|
|
using Type = DisplayProperties2KHR;
|
|
};
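  // Editorial note (not generated from the registry): DisplayProperties2KHR is returned by
  // vk::PhysicalDevice::getDisplayProperties2KHR. A minimal sketch, assuming at least one display
  // is attached to `physicalDevice`:
  //
  //   std::vector<vk::DisplayProperties2KHR> displays = physicalDevice.getDisplayProperties2KHR();
  //   vk::DisplayKHR display = displays.front().displayProperties.display;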
|
|
|
|
struct DisplaySurfaceCreateInfoKHR
|
|
{
|
|
using NativeType = VkDisplaySurfaceCreateInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplaySurfaceCreateInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, uint32_t planeIndex_ = {}, uint32_t planeStackIndex_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, float globalAlpha_ = {}, VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque, VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), displayMode( displayMode_ ), planeIndex( planeIndex_ ), planeStackIndex( planeStackIndex_ ), transform( transform_ ), globalAlpha( globalAlpha_ ), alphaMode( alphaMode_ ), imageExtent( imageExtent_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DisplaySurfaceCreateInfoKHR( *reinterpret_cast<DisplaySurfaceCreateInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DisplaySurfaceCreateInfoKHR & operator=( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DisplaySurfaceCreateInfoKHR & operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setDisplayMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
displayMode = displayMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
planeIndex = planeIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPlaneStackIndex( uint32_t planeStackIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
planeStackIndex = planeStackIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
transform = transform_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setGlobalAlpha( float globalAlpha_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
globalAlpha = globalAlpha_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setAlphaMode( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
alphaMode = alphaMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageExtent = imageExtent_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDisplaySurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkDisplaySurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDisplaySurfaceCreateInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR const &, VULKAN_HPP_NAMESPACE::DisplayModeKHR const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &, float const &, VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, displayMode, planeIndex, planeStackIndex, transform, globalAlpha, alphaMode, imageExtent );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DisplaySurfaceCreateInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( displayMode == rhs.displayMode )
|
|
&& ( planeIndex == rhs.planeIndex )
|
|
&& ( planeStackIndex == rhs.planeStackIndex )
|
|
&& ( transform == rhs.transform )
|
|
&& ( globalAlpha == rhs.globalAlpha )
|
|
&& ( alphaMode == rhs.alphaMode )
|
|
&& ( imageExtent == rhs.imageExtent );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags = {};
|
|
VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {};
|
|
uint32_t planeIndex = {};
|
|
uint32_t planeStackIndex = {};
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
|
|
float globalAlpha = {};
|
|
VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque;
|
|
VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR>::value, "DisplaySurfaceCreateInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eDisplaySurfaceCreateInfoKHR>
|
|
{
|
|
using Type = DisplaySurfaceCreateInfoKHR;
|
|
};
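  // Editorial note (not generated from the registry): DisplaySurfaceCreateInfoKHR is consumed by
  // vk::Instance::createDisplayPlaneSurfaceKHR (VK_KHR_display). A minimal sketch, assuming
  // `instance`, `displayMode` and `extent` were obtained beforehand and plane 0 supports the mode:
  //
  //   auto createInfo = vk::DisplaySurfaceCreateInfoKHR{}
  //                       .setDisplayMode( displayMode )
  //                       .setPlaneIndex( 0 )
  //                       .setTransform( vk::SurfaceTransformFlagBitsKHR::eIdentity )
  //                       .setAlphaMode( vk::DisplayPlaneAlphaFlagBitsKHR::eOpaque )
  //                       .setGlobalAlpha( 1.0f )
  //                       .setImageExtent( extent );
  //   vk::SurfaceKHR surface = instance.createDisplayPlaneSurfaceKHR( createInfo );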
|
|
|
|
struct DrawIndexedIndirectCommand
|
|
{
|
|
using NativeType = VkDrawIndexedIndirectCommand;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand(uint32_t indexCount_ = {}, uint32_t instanceCount_ = {}, uint32_t firstIndex_ = {}, int32_t vertexOffset_ = {}, uint32_t firstInstance_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: indexCount( indexCount_ ), instanceCount( instanceCount_ ), firstIndex( firstIndex_ ), vertexOffset( vertexOffset_ ), firstInstance( firstInstance_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DrawIndexedIndirectCommand( *reinterpret_cast<DrawIndexedIndirectCommand const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DrawIndexedIndirectCommand & operator=( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DrawIndexedIndirectCommand & operator=( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
indexCount = indexCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
instanceCount = instanceCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
firstIndex = firstIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
vertexOffset = vertexOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
firstInstance = firstInstance_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDrawIndexedIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDrawIndexedIndirectCommand*>( this );
|
|
}
|
|
|
|
explicit operator VkDrawIndexedIndirectCommand &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDrawIndexedIndirectCommand*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, int32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DrawIndexedIndirectCommand const & ) const = default;
|
|
#else
|
|
bool operator==( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( indexCount == rhs.indexCount )
|
|
&& ( instanceCount == rhs.instanceCount )
|
|
&& ( firstIndex == rhs.firstIndex )
|
|
&& ( vertexOffset == rhs.vertexOffset )
|
|
&& ( firstInstance == rhs.firstInstance );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t indexCount = {};
|
|
uint32_t instanceCount = {};
|
|
uint32_t firstIndex = {};
|
|
int32_t vertexOffset = {};
|
|
uint32_t firstInstance = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand>::value, "DrawIndexedIndirectCommand is not nothrow_move_constructible!" );
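  // Editorial note (not generated from the registry): DrawIndexedIndirectCommand matches the record
  // layout that vkCmdDrawIndexedIndirect reads from a buffer created with eIndirectBuffer usage.
  // A minimal sketch, assuming `indirectBuffer` is such a buffer and `commandBuffer` is recording
  // inside a render pass with pipeline, vertex and index buffers already bound:
  //
  //   vk::DrawIndexedIndirectCommand cmd{ indexCount, 1 /*instanceCount*/, 0 /*firstIndex*/, 0 /*vertexOffset*/, 0 /*firstInstance*/ };
  //   // write `cmd` into indirectBuffer at offset 0 (host-visible mapping or staging copy), then:
  //   commandBuffer.drawIndexedIndirect( indirectBuffer, 0 /*offset*/, 1 /*drawCount*/, sizeof( vk::DrawIndexedIndirectCommand ) );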
|
|
|
|
struct DrawIndirectCommand
|
|
{
|
|
using NativeType = VkDrawIndirectCommand;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DrawIndirectCommand(uint32_t vertexCount_ = {}, uint32_t instanceCount_ = {}, uint32_t firstVertex_ = {}, uint32_t firstInstance_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: vertexCount( vertexCount_ ), instanceCount( instanceCount_ ), firstVertex( firstVertex_ ), firstInstance( firstInstance_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DrawIndirectCommand( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DrawIndirectCommand( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DrawIndirectCommand( *reinterpret_cast<DrawIndirectCommand const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DrawIndirectCommand & operator=( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DrawIndirectCommand & operator=( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndirectCommand const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
vertexCount = vertexCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
instanceCount = instanceCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
firstVertex = firstVertex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
firstInstance = firstInstance_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDrawIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDrawIndirectCommand*>( this );
|
|
}
|
|
|
|
explicit operator VkDrawIndirectCommand &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDrawIndirectCommand*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( vertexCount, instanceCount, firstVertex, firstInstance );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DrawIndirectCommand const & ) const = default;
|
|
#else
|
|
bool operator==( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( vertexCount == rhs.vertexCount )
|
|
&& ( instanceCount == rhs.instanceCount )
|
|
&& ( firstVertex == rhs.firstVertex )
|
|
&& ( firstInstance == rhs.firstInstance );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t vertexCount = {};
|
|
uint32_t instanceCount = {};
|
|
uint32_t firstVertex = {};
|
|
uint32_t firstInstance = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrawIndirectCommand>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrawIndirectCommand>::value, "DrawIndirectCommand is not nothrow_move_constructible!" );
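  // Editorial note (not generated from the registry): DrawIndirectCommand is the non-indexed
  // counterpart, matching what vkCmdDrawIndirect reads from an indirect buffer. Sketch under the
  // same assumptions as the previous note:
  //
  //   vk::DrawIndirectCommand cmd{ vertexCount, 1 /*instanceCount*/, 0 /*firstVertex*/, 0 /*firstInstance*/ };
  //   commandBuffer.drawIndirect( indirectBuffer, 0 /*offset*/, 1 /*drawCount*/, sizeof( vk::DrawIndirectCommand ) );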
|
|
|
|
struct DrawMeshTasksIndirectCommandNV
|
|
{
|
|
using NativeType = VkDrawMeshTasksIndirectCommandNV;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV(uint32_t taskCount_ = {}, uint32_t firstTask_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: taskCount( taskCount_ ), firstTask( firstTask_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DrawMeshTasksIndirectCommandNV( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DrawMeshTasksIndirectCommandNV( *reinterpret_cast<DrawMeshTasksIndirectCommandNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DrawMeshTasksIndirectCommandNV & operator=( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DrawMeshTasksIndirectCommandNV & operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV & setTaskCount( uint32_t taskCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
taskCount = taskCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV & setFirstTask( uint32_t firstTask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
firstTask = firstTask_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkDrawMeshTasksIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDrawMeshTasksIndirectCommandNV*>( this );
|
|
}
|
|
|
|
explicit operator VkDrawMeshTasksIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDrawMeshTasksIndirectCommandNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( taskCount, firstTask );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DrawMeshTasksIndirectCommandNV const & ) const = default;
|
|
#else
|
|
bool operator==( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( taskCount == rhs.taskCount )
|
|
&& ( firstTask == rhs.firstTask );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t taskCount = {};
|
|
uint32_t firstTask = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV ) == sizeof( VkDrawMeshTasksIndirectCommandNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV>::value, "DrawMeshTasksIndirectCommandNV is not nothrow_move_constructible!" );
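  // Editorial note (not generated from the registry): DrawMeshTasksIndirectCommandNV is the record
  // format read by vkCmdDrawMeshTasksIndirectNV (VK_NV_mesh_shader). Sketch, assuming a bound mesh
  // shading pipeline and an `indirectBuffer` holding one record at offset 0:
  //
  //   vk::DrawMeshTasksIndirectCommandNV cmd{ taskCount, 0 /*firstTask*/ };
  //   commandBuffer.drawMeshTasksIndirectNV( indirectBuffer, 0 /*offset*/, 1 /*drawCount*/, sizeof( vk::DrawMeshTasksIndirectCommandNV ) );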
|
|
|
|
struct DrmFormatModifierProperties2EXT
|
|
{
|
|
using NativeType = VkDrmFormatModifierProperties2EXT;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DrmFormatModifierProperties2EXT(uint64_t drmFormatModifier_ = {}, uint32_t drmFormatModifierPlaneCount_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 drmFormatModifierTilingFeatures_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: drmFormatModifier( drmFormatModifier_ ), drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ), drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DrmFormatModifierProperties2EXT( DrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DrmFormatModifierProperties2EXT( VkDrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DrmFormatModifierProperties2EXT( *reinterpret_cast<DrmFormatModifierProperties2EXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DrmFormatModifierProperties2EXT & operator=( DrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DrmFormatModifierProperties2EXT & operator=( VkDrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkDrmFormatModifierProperties2EXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDrmFormatModifierProperties2EXT*>( this );
|
|
}
|
|
|
|
explicit operator VkDrmFormatModifierProperties2EXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDrmFormatModifierProperties2EXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint64_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( drmFormatModifier, drmFormatModifierPlaneCount, drmFormatModifierTilingFeatures );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DrmFormatModifierProperties2EXT const & ) const = default;
|
|
#else
|
|
bool operator==( DrmFormatModifierProperties2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( drmFormatModifier == rhs.drmFormatModifier )
|
|
&& ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount )
|
|
&& ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DrmFormatModifierProperties2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint64_t drmFormatModifier = {};
|
|
uint32_t drmFormatModifierPlaneCount = {};
|
|
VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 drmFormatModifierTilingFeatures = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT ) == sizeof( VkDrmFormatModifierProperties2EXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT>::value, "DrmFormatModifierProperties2EXT is not nothrow_move_constructible!" );
|
|
|
|
struct DrmFormatModifierPropertiesEXT
|
|
{
|
|
using NativeType = VkDrmFormatModifierPropertiesEXT;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT(uint64_t drmFormatModifier_ = {}, uint32_t drmFormatModifierPlaneCount_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: drmFormatModifier( drmFormatModifier_ ), drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ), drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DrmFormatModifierPropertiesEXT( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: DrmFormatModifierPropertiesEXT( *reinterpret_cast<DrmFormatModifierPropertiesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
DrmFormatModifierPropertiesEXT & operator=( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
DrmFormatModifierPropertiesEXT & operator=( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkDrmFormatModifierPropertiesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkDrmFormatModifierPropertiesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint64_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( drmFormatModifier, drmFormatModifierPlaneCount, drmFormatModifierTilingFeatures );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( DrmFormatModifierPropertiesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( drmFormatModifier == rhs.drmFormatModifier )
|
|
&& ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount )
|
|
&& ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint64_t drmFormatModifier = {};
|
|
uint32_t drmFormatModifierPlaneCount = {};
|
|
VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT ) == sizeof( VkDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT>::value, "DrmFormatModifierPropertiesEXT is not nothrow_move_constructible!" );
|
|
|
|
  struct DrmFormatModifierPropertiesList2EXT
  {
    using NativeType = VkDrmFormatModifierPropertiesList2EXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDrmFormatModifierPropertiesList2EXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT(uint32_t drmFormatModifierCount_ = {}, VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * pDrmFormatModifierProperties_ = {}) VULKAN_HPP_NOEXCEPT
      : drmFormatModifierCount( drmFormatModifierCount_ ), pDrmFormatModifierProperties( pDrmFormatModifierProperties_ )
    {}

    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT( DrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DrmFormatModifierPropertiesList2EXT( VkDrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DrmFormatModifierPropertiesList2EXT( *reinterpret_cast<DrmFormatModifierPropertiesList2EXT const *>( &rhs ) )
    {}

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DrmFormatModifierPropertiesList2EXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT> const & drmFormatModifierProperties_ )
      : drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifierProperties_.size() ) ), pDrmFormatModifierProperties( drmFormatModifierProperties_.data() )
    {}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    DrmFormatModifierPropertiesList2EXT & operator=( DrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DrmFormatModifierPropertiesList2EXT & operator=( VkDrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT const *>( &rhs );
      return *this;
    }

    explicit operator VkDrmFormatModifierPropertiesList2EXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDrmFormatModifierPropertiesList2EXT*>( this );
    }

    explicit operator VkDrmFormatModifierPropertiesList2EXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDrmFormatModifierPropertiesList2EXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifierProperties );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( DrmFormatModifierPropertiesList2EXT const & ) const = default;
#else
    bool operator==( DrmFormatModifierPropertiesList2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( drmFormatModifierCount == rhs.drmFormatModifierCount )
          && ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties );
#endif
    }

    bool operator!=( DrmFormatModifierPropertiesList2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesList2EXT;
    void * pNext = {};
    uint32_t drmFormatModifierCount = {};
    VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * pDrmFormatModifierProperties = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT ) == sizeof( VkDrmFormatModifierPropertiesList2EXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT>::value, "DrmFormatModifierPropertiesList2EXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eDrmFormatModifierPropertiesList2EXT>
  {
    using Type = DrmFormatModifierPropertiesList2EXT;
  };

  struct DrmFormatModifierPropertiesListEXT
  {
    using NativeType = VkDrmFormatModifierPropertiesListEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDrmFormatModifierPropertiesListEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT(uint32_t drmFormatModifierCount_ = {}, VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties_ = {}) VULKAN_HPP_NOEXCEPT
      : drmFormatModifierCount( drmFormatModifierCount_ ), pDrmFormatModifierProperties( pDrmFormatModifierProperties_ )
    {}

    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DrmFormatModifierPropertiesListEXT( *reinterpret_cast<DrmFormatModifierPropertiesListEXT const *>( &rhs ) )
    {}

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DrmFormatModifierPropertiesListEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT> const & drmFormatModifierProperties_ )
      : drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifierProperties_.size() ) ), pDrmFormatModifierProperties( drmFormatModifierProperties_.data() )
    {}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    DrmFormatModifierPropertiesListEXT & operator=( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DrmFormatModifierPropertiesListEXT & operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT const *>( &rhs );
      return *this;
    }

    explicit operator VkDrmFormatModifierPropertiesListEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDrmFormatModifierPropertiesListEXT*>( this );
    }

    explicit operator VkDrmFormatModifierPropertiesListEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDrmFormatModifierPropertiesListEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifierProperties );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( DrmFormatModifierPropertiesListEXT const & ) const = default;
#else
    bool operator==( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( drmFormatModifierCount == rhs.drmFormatModifierCount )
          && ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties );
#endif
    }

    bool operator!=( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT;
    void * pNext = {};
    uint32_t drmFormatModifierCount = {};
    VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT ) == sizeof( VkDrmFormatModifierPropertiesListEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT>::value, "DrmFormatModifierPropertiesListEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eDrmFormatModifierPropertiesListEXT>
  {
    using Type = DrmFormatModifierPropertiesListEXT;
  };
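
  // Usage sketch (illustrative, not part of the generated header): querying which DRM format
  // modifiers a format supports follows the usual two-call enumeration pattern, with
  // DrmFormatModifierPropertiesListEXT chained into FormatProperties2. The `physicalDevice`
  // handle and the chosen format are assumptions of this sketch, and `vk` stands for the
  // default VULKAN_HPP_NAMESPACE; VK_EXT_image_drm_format_modifier must be supported for the
  // results to be meaningful.
  //
  //   vk::DrmFormatModifierPropertiesListEXT modifierList;
  //   vk::FormatProperties2                  formatProperties;
  //   formatProperties.pNext = &modifierList;
  //
  //   // first call only fills in drmFormatModifierCount
  //   physicalDevice.getFormatProperties2( vk::Format::eR8G8B8A8Unorm, &formatProperties );
  //
  //   std::vector<vk::DrmFormatModifierPropertiesEXT> modifiers( modifierList.drmFormatModifierCount );
  //   modifierList.pDrmFormatModifierProperties = modifiers.data();
  //
  //   // second call writes one DrmFormatModifierPropertiesEXT per supported modifier
  //   physicalDevice.getFormatProperties2( vk::Format::eR8G8B8A8Unorm, &formatProperties );
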
  struct EventCreateInfo
  {
    using NativeType = VkEventCreateInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eEventCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR EventCreateInfo(VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
    {}

    VULKAN_HPP_CONSTEXPR EventCreateInfo( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    EventCreateInfo( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : EventCreateInfo( *reinterpret_cast<EventCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    EventCreateInfo & operator=( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    EventCreateInfo & operator=( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::EventCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 EventCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 EventCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkEventCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkEventCreateInfo*>( this );
    }

    explicit operator VkEventCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkEventCreateInfo*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::EventCreateFlags const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( EventCreateInfo const & ) const = default;
#else
    bool operator==( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags );
#endif
    }

    bool operator!=( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eEventCreateInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::EventCreateFlags flags = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::EventCreateInfo>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::EventCreateInfo>::value, "EventCreateInfo is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eEventCreateInfo>
  {
    using Type = EventCreateInfo;
  };
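
  // Usage sketch (illustrative, not part of the generated header): EventCreateInfo carries only
  // optional flags, so creating and destroying an event is a one-liner; `device` is an assumed
  // vk::Device handle and error handling is omitted.
  //
  //   vk::Event event = device.createEvent( vk::EventCreateInfo{} );
  //   // ... signal / wait on the event from the host or a command buffer ...
  //   device.destroyEvent( event );
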
struct ExportFenceCreateInfo
|
|
{
|
|
using NativeType = VkExportFenceCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: handleTypes( handleTypes_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExportFenceCreateInfo( *reinterpret_cast<ExportFenceCreateInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExportFenceCreateInfo & operator=( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExportFenceCreateInfo & operator=( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleTypes = handleTypes_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkExportFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExportFenceCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkExportFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExportFenceCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, handleTypes );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ExportFenceCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( handleTypes == rhs.handleTypes );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo>::value, "ExportFenceCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eExportFenceCreateInfo>
|
|
{
|
|
using Type = ExportFenceCreateInfo;
|
|
};
|
|
using ExportFenceCreateInfoKHR = ExportFenceCreateInfo;
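
  // Usage sketch (illustrative, not part of the generated header): to create a fence whose payload
  // may be exported, chain ExportFenceCreateInfo into FenceCreateInfo::pNext. The handle type and
  // the `device` handle are assumptions of this sketch; the chosen type should first be confirmed
  // as exportable via PhysicalDevice::getExternalFenceProperties.
  //
  //   vk::ExportFenceCreateInfo exportInfo( vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd );
  //   vk::FenceCreateInfo       fenceInfo;
  //   fenceInfo.pNext = &exportInfo;
  //   vk::Fence fence = device.createFence( fenceInfo );
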
|
|
|
|
#if defined( VK_USE_PLATFORM_WIN32_KHR )
|
|
struct ExportFenceWin32HandleInfoKHR
|
|
{
|
|
using NativeType = VkExportFenceWin32HandleInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceWin32HandleInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR(const SECURITY_ATTRIBUTES * pAttributes_ = {}, DWORD dwAccess_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: pAttributes( pAttributes_ ), dwAccess( dwAccess_ ), name( name_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExportFenceWin32HandleInfoKHR( *reinterpret_cast<ExportFenceWin32HandleInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExportFenceWin32HandleInfoKHR & operator=( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExportFenceWin32HandleInfoKHR & operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAttributes = pAttributes_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dwAccess = dwAccess_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
name = name_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkExportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExportFenceWin32HandleInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkExportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExportFenceWin32HandleInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const SECURITY_ATTRIBUTES * const &, DWORD const &, LPCWSTR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pAttributes, dwAccess, name );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ExportFenceWin32HandleInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( pAttributes == rhs.pAttributes )
|
|
&& ( dwAccess == rhs.dwAccess )
|
|
&& ( name == rhs.name );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR;
|
|
const void * pNext = {};
|
|
const SECURITY_ATTRIBUTES * pAttributes = {};
|
|
DWORD dwAccess = {};
|
|
LPCWSTR name = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR>::value, "ExportFenceWin32HandleInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eExportFenceWin32HandleInfoKHR>
|
|
{
|
|
using Type = ExportFenceWin32HandleInfoKHR;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
|
|
|
|
struct ExportMemoryAllocateInfo
|
|
{
|
|
using NativeType = VkExportMemoryAllocateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: handleTypes( handleTypes_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExportMemoryAllocateInfo( *reinterpret_cast<ExportMemoryAllocateInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExportMemoryAllocateInfo & operator=( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExportMemoryAllocateInfo & operator=( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleTypes = handleTypes_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkExportMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExportMemoryAllocateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkExportMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExportMemoryAllocateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, handleTypes );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ExportMemoryAllocateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( handleTypes == rhs.handleTypes );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo>::value, "ExportMemoryAllocateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eExportMemoryAllocateInfo>
|
|
{
|
|
using Type = ExportMemoryAllocateInfo;
|
|
};
|
|
using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo;
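
  // Usage sketch (illustrative, not part of the generated header): exportable device memory is
  // requested by chaining ExportMemoryAllocateInfo into MemoryAllocateInfo. `device`,
  // `allocationSize` and `memoryTypeIndex` are assumptions of this sketch and would normally come
  // from a memory-requirements query.
  //
  //   vk::ExportMemoryAllocateInfo exportInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
  //   vk::MemoryAllocateInfo       allocInfo( allocationSize, memoryTypeIndex );
  //   allocInfo.pNext = &exportInfo;
  //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );
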
|
|
|
|
struct ExportMemoryAllocateInfoNV
|
|
{
|
|
using NativeType = VkExportMemoryAllocateInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: handleTypes( handleTypes_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExportMemoryAllocateInfoNV( *reinterpret_cast<ExportMemoryAllocateInfoNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExportMemoryAllocateInfoNV & operator=( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExportMemoryAllocateInfoNV & operator=( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleTypes = handleTypes_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkExportMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExportMemoryAllocateInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkExportMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExportMemoryAllocateInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, handleTypes );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ExportMemoryAllocateInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( handleTypes == rhs.handleTypes );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV>::value, "ExportMemoryAllocateInfoNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eExportMemoryAllocateInfoNV>
|
|
{
|
|
using Type = ExportMemoryAllocateInfoNV;
|
|
};
|
|
|
|
#if defined( VK_USE_PLATFORM_WIN32_KHR )
|
|
struct ExportMemoryWin32HandleInfoKHR
|
|
{
|
|
using NativeType = VkExportMemoryWin32HandleInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR(const SECURITY_ATTRIBUTES * pAttributes_ = {}, DWORD dwAccess_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: pAttributes( pAttributes_ ), dwAccess( dwAccess_ ), name( name_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExportMemoryWin32HandleInfoKHR( *reinterpret_cast<ExportMemoryWin32HandleInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExportMemoryWin32HandleInfoKHR & operator=( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExportMemoryWin32HandleInfoKHR & operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAttributes = pAttributes_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dwAccess = dwAccess_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
name = name_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkExportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExportMemoryWin32HandleInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkExportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExportMemoryWin32HandleInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const SECURITY_ATTRIBUTES * const &, DWORD const &, LPCWSTR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pAttributes, dwAccess, name );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ExportMemoryWin32HandleInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( pAttributes == rhs.pAttributes )
|
|
&& ( dwAccess == rhs.dwAccess )
|
|
&& ( name == rhs.name );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR;
|
|
const void * pNext = {};
|
|
const SECURITY_ATTRIBUTES * pAttributes = {};
|
|
DWORD dwAccess = {};
|
|
LPCWSTR name = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR>::value, "ExportMemoryWin32HandleInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eExportMemoryWin32HandleInfoKHR>
|
|
{
|
|
using Type = ExportMemoryWin32HandleInfoKHR;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
|
|
|
|
#if defined( VK_USE_PLATFORM_WIN32_KHR )
|
|
struct ExportMemoryWin32HandleInfoNV
|
|
{
|
|
using NativeType = VkExportMemoryWin32HandleInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV(const SECURITY_ATTRIBUTES * pAttributes_ = {}, DWORD dwAccess_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: pAttributes( pAttributes_ ), dwAccess( dwAccess_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExportMemoryWin32HandleInfoNV( *reinterpret_cast<ExportMemoryWin32HandleInfoNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExportMemoryWin32HandleInfoNV & operator=( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExportMemoryWin32HandleInfoNV & operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAttributes = pAttributes_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dwAccess = dwAccess_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkExportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExportMemoryWin32HandleInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkExportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExportMemoryWin32HandleInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const SECURITY_ATTRIBUTES * const &, DWORD const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pAttributes, dwAccess );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ExportMemoryWin32HandleInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( pAttributes == rhs.pAttributes )
|
|
&& ( dwAccess == rhs.dwAccess );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV;
|
|
const void * pNext = {};
|
|
const SECURITY_ATTRIBUTES * pAttributes = {};
|
|
DWORD dwAccess = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV>::value, "ExportMemoryWin32HandleInfoNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eExportMemoryWin32HandleInfoNV>
|
|
{
|
|
using Type = ExportMemoryWin32HandleInfoNV;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
|
|
|
|
struct ExportSemaphoreCreateInfo
|
|
{
|
|
using NativeType = VkExportSemaphoreCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: handleTypes( handleTypes_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExportSemaphoreCreateInfo( *reinterpret_cast<ExportSemaphoreCreateInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExportSemaphoreCreateInfo & operator=( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExportSemaphoreCreateInfo & operator=( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleTypes = handleTypes_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkExportSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExportSemaphoreCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkExportSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExportSemaphoreCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, handleTypes );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ExportSemaphoreCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( handleTypes == rhs.handleTypes );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo>::value, "ExportSemaphoreCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eExportSemaphoreCreateInfo>
|
|
{
|
|
using Type = ExportSemaphoreCreateInfo;
|
|
};
|
|
using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo;
|
|
|
|
#if defined( VK_USE_PLATFORM_WIN32_KHR )
|
|
struct ExportSemaphoreWin32HandleInfoKHR
|
|
{
|
|
using NativeType = VkExportSemaphoreWin32HandleInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreWin32HandleInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR(const SECURITY_ATTRIBUTES * pAttributes_ = {}, DWORD dwAccess_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: pAttributes( pAttributes_ ), dwAccess( dwAccess_ ), name( name_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExportSemaphoreWin32HandleInfoKHR( *reinterpret_cast<ExportSemaphoreWin32HandleInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExportSemaphoreWin32HandleInfoKHR & operator=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExportSemaphoreWin32HandleInfoKHR & operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAttributes = pAttributes_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dwAccess = dwAccess_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
name = name_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkExportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExportSemaphoreWin32HandleInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkExportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExportSemaphoreWin32HandleInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const SECURITY_ATTRIBUTES * const &, DWORD const &, LPCWSTR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pAttributes, dwAccess, name );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ExportSemaphoreWin32HandleInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( pAttributes == rhs.pAttributes )
|
|
&& ( dwAccess == rhs.dwAccess )
|
|
&& ( name == rhs.name );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR;
|
|
const void * pNext = {};
|
|
const SECURITY_ATTRIBUTES * pAttributes = {};
|
|
DWORD dwAccess = {};
|
|
LPCWSTR name = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR>::value, "ExportSemaphoreWin32HandleInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eExportSemaphoreWin32HandleInfoKHR>
|
|
{
|
|
using Type = ExportSemaphoreWin32HandleInfoKHR;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
|
|
|
|
struct ExtensionProperties
|
|
{
|
|
using NativeType = VkExtensionProperties;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 ExtensionProperties(std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const & extensionName_ = {}, uint32_t specVersion_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: extensionName( extensionName_ ), specVersion( specVersion_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExtensionProperties( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExtensionProperties( *reinterpret_cast<ExtensionProperties const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExtensionProperties & operator=( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExtensionProperties & operator=( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExtensionProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkExtensionProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExtensionProperties*>( this );
|
|
}
|
|
|
|
explicit operator VkExtensionProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExtensionProperties*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( extensionName, specVersion );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ExtensionProperties const & ) const = default;
|
|
#else
|
|
bool operator==( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( extensionName == rhs.extensionName )
|
|
&& ( specVersion == rhs.specVersion );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> extensionName = {};
|
|
uint32_t specVersion = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExtensionProperties ) == sizeof( VkExtensionProperties ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExtensionProperties>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExtensionProperties>::value, "ExtensionProperties is not nothrow_move_constructible!" );
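
  // Usage sketch (illustrative, not part of the generated header): ExtensionProperties is the
  // element type returned by the extension enumeration calls; in enhanced mode vulkan.hpp wraps
  // the two-call pattern into a single call returning a std::vector. <cstring> is assumed for
  // strcmp.
  //
  //   std::vector<vk::ExtensionProperties> available = vk::enumerateInstanceExtensionProperties();
  //   for ( vk::ExtensionProperties const & ep : available )
  //   {
  //     if ( strcmp( ep.extensionName.data(), VK_KHR_SURFACE_EXTENSION_NAME ) == 0 )
  //     {
  //       // VK_KHR_surface is available; ep.specVersion holds its spec revision
  //     }
  //   }
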
|
|
|
|
struct ExternalMemoryProperties
|
|
{
|
|
using NativeType = VkExternalMemoryProperties;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExternalMemoryProperties(VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: externalMemoryFeatures( externalMemoryFeatures_ ), exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ), compatibleHandleTypes( compatibleHandleTypes_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExternalMemoryProperties( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExternalMemoryProperties( *reinterpret_cast<ExternalMemoryProperties const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExternalMemoryProperties & operator=( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExternalMemoryProperties & operator=( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkExternalMemoryProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExternalMemoryProperties*>( this );
|
|
}
|
|
|
|
explicit operator VkExternalMemoryProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExternalMemoryProperties*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( externalMemoryFeatures, exportFromImportedHandleTypes, compatibleHandleTypes );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ExternalMemoryProperties const & ) const = default;
|
|
#else
|
|
bool operator==( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( externalMemoryFeatures == rhs.externalMemoryFeatures )
|
|
&& ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
|
|
&& ( compatibleHandleTypes == rhs.compatibleHandleTypes );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties ) == sizeof( VkExternalMemoryProperties ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalMemoryProperties>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalMemoryProperties>::value, "ExternalMemoryProperties is not nothrow_move_constructible!" );
|
|
using ExternalMemoryPropertiesKHR = ExternalMemoryProperties;
|
|
|
|
struct ExternalBufferProperties
|
|
{
|
|
using NativeType = VkExternalBufferProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalBufferProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExternalBufferProperties(VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: externalMemoryProperties( externalMemoryProperties_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExternalBufferProperties( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExternalBufferProperties( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExternalBufferProperties( *reinterpret_cast<ExternalBufferProperties const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExternalBufferProperties & operator=( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExternalBufferProperties & operator=( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalBufferProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkExternalBufferProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExternalBufferProperties*>( this );
|
|
}
|
|
|
|
explicit operator VkExternalBufferProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExternalBufferProperties*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, externalMemoryProperties );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ExternalBufferProperties const & ) const = default;
|
|
#else
|
|
bool operator==( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( externalMemoryProperties == rhs.externalMemoryProperties );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalBufferProperties;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalBufferProperties>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalBufferProperties>::value, "ExternalBufferProperties is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eExternalBufferProperties>
|
|
{
|
|
using Type = ExternalBufferProperties;
|
|
};
|
|
using ExternalBufferPropertiesKHR = ExternalBufferProperties;
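
  // Usage sketch (illustrative, not part of the generated header): ExternalBufferProperties is a
  // pure output structure, filled in by PhysicalDevice::getExternalBufferProperties for a given
  // usage / handle-type combination. `physicalDevice` is an assumed vk::PhysicalDevice handle.
  //
  //   vk::PhysicalDeviceExternalBufferInfo bufferInfo(
  //     {}, vk::BufferUsageFlagBits::eTransferSrc, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
  //   vk::ExternalBufferProperties properties = physicalDevice.getExternalBufferProperties( bufferInfo );
  //   bool exportable = static_cast<bool>( properties.externalMemoryProperties.externalMemoryFeatures
  //                                        & vk::ExternalMemoryFeatureFlagBits::eExportable );
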
|
|
|
|
  struct ExternalFenceProperties
  {
    using NativeType = VkExternalFenceProperties;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFenceProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExternalFenceProperties(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures_ = {}) VULKAN_HPP_NOEXCEPT
    : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ), compatibleHandleTypes( compatibleHandleTypes_ ), externalFenceFeatures( externalFenceFeatures_ )
    {}

    VULKAN_HPP_CONSTEXPR ExternalFenceProperties( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExternalFenceProperties( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalFenceProperties( *reinterpret_cast<ExternalFenceProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    ExternalFenceProperties & operator=( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExternalFenceProperties & operator=( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFenceProperties const *>( &rhs );
      return *this;
    }


    explicit operator VkExternalFenceProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalFenceProperties*>( this );
    }

    explicit operator VkExternalFenceProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalFenceProperties*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags const &, VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, exportFromImportedHandleTypes, compatibleHandleTypes, externalFenceFeatures );
    }
#endif


#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( ExternalFenceProperties const & ) const = default;
#else
    bool operator==( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
          && ( compatibleHandleTypes == rhs.compatibleHandleTypes )
          && ( externalFenceFeatures == rhs.externalFenceFeatures );
#endif
    }

    bool operator!=( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFenceProperties;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes = {};
    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes = {};
    VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures = {};

  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalFenceProperties ) == sizeof( VkExternalFenceProperties ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalFenceProperties>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalFenceProperties>::value, "ExternalFenceProperties is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eExternalFenceProperties>
  {
    using Type = ExternalFenceProperties;
  };
  using ExternalFencePropertiesKHR = ExternalFenceProperties;

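  // Usage sketch for ExternalFenceProperties (illustrative only). It is filled in by
  // vkGetPhysicalDeviceExternalFenceProperties / PhysicalDevice::getExternalFenceProperties and
  // reports whether a fence payload of the given handle type can be exported or imported. Assumes
  // the default "vk" namespace and a valid vk::PhysicalDevice named "physicalDevice":
  //
  //   vk::PhysicalDeviceExternalFenceInfo externalFenceInfo( vk::ExternalFenceHandleTypeFlagBits::eSyncFd );
  //   vk::ExternalFenceProperties externalFenceProperties =
  //     physicalDevice.getExternalFenceProperties( externalFenceInfo );
  //   bool exportable = static_cast<bool>(
  //     externalFenceProperties.externalFenceFeatures & vk::ExternalFenceFeatureFlagBits::eExportable );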
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
|
|
struct ExternalFormatANDROID
|
|
{
|
|
using NativeType = VkExternalFormatANDROID;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFormatANDROID;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExternalFormatANDROID(uint64_t externalFormat_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: externalFormat( externalFormat_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExternalFormatANDROID( *reinterpret_cast<ExternalFormatANDROID const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExternalFormatANDROID & operator=( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExternalFormatANDROID & operator=( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
externalFormat = externalFormat_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkExternalFormatANDROID const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExternalFormatANDROID*>( this );
|
|
}
|
|
|
|
explicit operator VkExternalFormatANDROID &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExternalFormatANDROID*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint64_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, externalFormat );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ExternalFormatANDROID const & ) const = default;
|
|
#else
|
|
bool operator==( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( externalFormat == rhs.externalFormat );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFormatANDROID;
|
|
void * pNext = {};
|
|
uint64_t externalFormat = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID>::value, "ExternalFormatANDROID is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eExternalFormatANDROID>
|
|
{
|
|
using Type = ExternalFormatANDROID;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
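  // Usage sketch for ExternalFormatANDROID (illustrative only, VK_ANDROID_external_memory_android_hardware_buffer).
  // The externalFormat value is implementation-defined and is normally taken from
  // vk::AndroidHardwareBufferFormatPropertiesANDROID::externalFormat after querying an AHardwareBuffer;
  // the struct is then chained into the pNext of, e.g., a SamplerYcbcrConversionCreateInfo or an
  // ImageCreateInfo. "formatProperties" and "conversionInfo" below are assumed to exist in the application:
  //
  //   vk::ExternalFormatANDROID externalFormat( formatProperties.externalFormat );
  //   conversionInfo.pNext = &externalFormat;   // conversionInfo is a vk::SamplerYcbcrConversionCreateInfo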
|
|
|
|
struct ExternalImageFormatProperties
|
|
{
|
|
using NativeType = VkExternalImageFormatProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalImageFormatProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties(VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: externalMemoryProperties( externalMemoryProperties_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExternalImageFormatProperties( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExternalImageFormatProperties( *reinterpret_cast<ExternalImageFormatProperties const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExternalImageFormatProperties & operator=( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExternalImageFormatProperties & operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkExternalImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExternalImageFormatProperties*>( this );
|
|
}
|
|
|
|
explicit operator VkExternalImageFormatProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExternalImageFormatProperties*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, externalMemoryProperties );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ExternalImageFormatProperties const & ) const = default;
|
|
#else
|
|
bool operator==( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( externalMemoryProperties == rhs.externalMemoryProperties );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalImageFormatProperties;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties>::value, "ExternalImageFormatProperties is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eExternalImageFormatProperties>
|
|
{
|
|
using Type = ExternalImageFormatProperties;
|
|
};
|
|
using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties;
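  // Usage sketch for ExternalImageFormatProperties (illustrative only). It is returned through the
  // pNext chain of ImageFormatProperties2 when a PhysicalDeviceExternalImageFormatInfo is chained
  // into the query; the call throws if the format/usage combination is unsupported. Assumes the
  // default "vk" namespace and a valid vk::PhysicalDevice "physicalDevice":
  //
  //   vk::PhysicalDeviceExternalImageFormatInfo externalInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
  //   vk::PhysicalDeviceImageFormatInfo2 formatInfo( vk::Format::eR8G8B8A8Unorm, vk::ImageType::e2D,
  //                                                  vk::ImageTiling::eOptimal, vk::ImageUsageFlagBits::eSampled );
  //   formatInfo.pNext = &externalInfo;
  //   auto chain = physicalDevice.getImageFormatProperties2<vk::ImageFormatProperties2,
  //                                                         vk::ExternalImageFormatProperties>( formatInfo );
  //   vk::ExternalMemoryProperties memoryProperties =
  //     chain.get<vk::ExternalImageFormatProperties>().externalMemoryProperties;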
|
|
|
|
struct ImageFormatProperties
|
|
{
|
|
using NativeType = VkImageFormatProperties;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageFormatProperties(VULKAN_HPP_NAMESPACE::Extent3D maxExtent_ = {}, uint32_t maxMipLevels_ = {}, uint32_t maxArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: maxExtent( maxExtent_ ), maxMipLevels( maxMipLevels_ ), maxArrayLayers( maxArrayLayers_ ), sampleCounts( sampleCounts_ ), maxResourceSize( maxResourceSize_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageFormatProperties( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageFormatProperties( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageFormatProperties( *reinterpret_cast<ImageFormatProperties const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageFormatProperties & operator=( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageFormatProperties & operator=( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageFormatProperties*>( this );
|
|
}
|
|
|
|
explicit operator VkImageFormatProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageFormatProperties*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::Extent3D const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( maxExtent, maxMipLevels, maxArrayLayers, sampleCounts, maxResourceSize );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageFormatProperties const & ) const = default;
|
|
#else
|
|
bool operator==( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( maxExtent == rhs.maxExtent )
|
|
&& ( maxMipLevels == rhs.maxMipLevels )
|
|
&& ( maxArrayLayers == rhs.maxArrayLayers )
|
|
&& ( sampleCounts == rhs.sampleCounts )
|
|
&& ( maxResourceSize == rhs.maxResourceSize );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::Extent3D maxExtent = {};
|
|
uint32_t maxMipLevels = {};
|
|
uint32_t maxArrayLayers = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatProperties ) == sizeof( VkImageFormatProperties ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::value, "ImageFormatProperties is not nothrow_move_constructible!" );
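  // Usage sketch for ImageFormatProperties (illustrative only). It is the output of
  // vkGetPhysicalDeviceImageFormatProperties / PhysicalDevice::getImageFormatProperties and reports
  // the limits (extent, mip levels, array layers, sample counts, resource size) for one specific
  // format/type/tiling/usage combination. Assumes a valid vk::PhysicalDevice "physicalDevice":
  //
  //   vk::ImageFormatProperties imageFormatProperties = physicalDevice.getImageFormatProperties(
  //     vk::Format::eR8G8B8A8Unorm, vk::ImageType::e2D, vk::ImageTiling::eOptimal,
  //     vk::ImageUsageFlagBits::eSampled | vk::ImageUsageFlagBits::eTransferDst, {} );
  //   uint32_t maxMipLevels = imageFormatProperties.maxMipLevels;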
|
|
|
|
struct ExternalImageFormatPropertiesNV
|
|
{
|
|
using NativeType = VkExternalImageFormatPropertiesNV;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV(VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: imageFormatProperties( imageFormatProperties_ ), externalMemoryFeatures( externalMemoryFeatures_ ), exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ), compatibleHandleTypes( compatibleHandleTypes_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExternalImageFormatPropertiesNV( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExternalImageFormatPropertiesNV( *reinterpret_cast<ExternalImageFormatPropertiesNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExternalImageFormatPropertiesNV & operator=( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExternalImageFormatPropertiesNV & operator=( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkExternalImageFormatPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExternalImageFormatPropertiesNV*>( this );
|
|
}
|
|
|
|
explicit operator VkExternalImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExternalImageFormatPropertiesNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::ImageFormatProperties const &, VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( imageFormatProperties, externalMemoryFeatures, exportFromImportedHandleTypes, compatibleHandleTypes );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ExternalImageFormatPropertiesNV const & ) const = default;
|
|
#else
|
|
bool operator==( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( imageFormatProperties == rhs.imageFormatProperties )
|
|
&& ( externalMemoryFeatures == rhs.externalMemoryFeatures )
|
|
&& ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
|
|
&& ( compatibleHandleTypes == rhs.compatibleHandleTypes );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::value, "ExternalImageFormatPropertiesNV is not nothrow_move_constructible!" );
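  // Usage sketch for ExternalImageFormatPropertiesNV (illustrative only, VK_NV_external_memory_capabilities).
  // The parameter order of PhysicalDevice::getExternalImageFormatPropertiesNV shown below follows the
  // C entry point vkGetPhysicalDeviceExternalImageFormatPropertiesNV and is an assumption of this sketch:
  //
  //   vk::ExternalImageFormatPropertiesNV nvProperties = physicalDevice.getExternalImageFormatPropertiesNV(
  //     vk::Format::eR8G8B8A8Unorm, vk::ImageType::e2D, vk::ImageTiling::eOptimal,
  //     vk::ImageUsageFlagBits::eSampled, {}, vk::ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 );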
|
|
|
|
struct ExternalMemoryBufferCreateInfo
|
|
{
|
|
using NativeType = VkExternalMemoryBufferCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryBufferCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: handleTypes( handleTypes_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExternalMemoryBufferCreateInfo( *reinterpret_cast<ExternalMemoryBufferCreateInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExternalMemoryBufferCreateInfo & operator=( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExternalMemoryBufferCreateInfo & operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleTypes = handleTypes_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkExternalMemoryBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExternalMemoryBufferCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkExternalMemoryBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExternalMemoryBufferCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, handleTypes );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ExternalMemoryBufferCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( handleTypes == rhs.handleTypes );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryBufferCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo>::value, "ExternalMemoryBufferCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eExternalMemoryBufferCreateInfo>
|
|
{
|
|
using Type = ExternalMemoryBufferCreateInfo;
|
|
};
|
|
using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo;
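  // Usage sketch for ExternalMemoryBufferCreateInfo (illustrative only). It is an input structure
  // chained into BufferCreateInfo::pNext to mark a buffer as exportable/importable for the given
  // external handle types. Assumes a valid vk::Device "device":
  //
  //   vk::ExternalMemoryBufferCreateInfo externalInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
  //   vk::BufferCreateInfo bufferInfo( {}, 65536, vk::BufferUsageFlagBits::eTransferSrc );
  //   bufferInfo.pNext = &externalInfo;
  //   vk::Buffer buffer = device.createBuffer( bufferInfo );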
|
|
|
|
struct ExternalMemoryImageCreateInfo
|
|
{
|
|
using NativeType = VkExternalMemoryImageCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: handleTypes( handleTypes_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExternalMemoryImageCreateInfo( *reinterpret_cast<ExternalMemoryImageCreateInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExternalMemoryImageCreateInfo & operator=( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExternalMemoryImageCreateInfo & operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleTypes = handleTypes_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkExternalMemoryImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExternalMemoryImageCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkExternalMemoryImageCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExternalMemoryImageCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, handleTypes );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ExternalMemoryImageCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( handleTypes == rhs.handleTypes );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo>::value, "ExternalMemoryImageCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eExternalMemoryImageCreateInfo>
|
|
{
|
|
using Type = ExternalMemoryImageCreateInfo;
|
|
};
|
|
using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo;
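  // Usage sketch for ExternalMemoryImageCreateInfo (illustrative only). Like its buffer counterpart,
  // it is chained into ImageCreateInfo::pNext before image creation; the setter chaining shown here
  // is available when VULKAN_HPP_NO_STRUCT_SETTERS is not defined. "imageInfo" is an assumed,
  // fully filled-in vk::ImageCreateInfo and "device" a valid vk::Device:
  //
  //   auto externalInfo = vk::ExternalMemoryImageCreateInfo()
  //                         .setHandleTypes( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
  //   imageInfo.setPNext( &externalInfo );
  //   vk::Image image = device.createImage( imageInfo );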
|
|
|
|
struct ExternalMemoryImageCreateInfoNV
|
|
{
|
|
using NativeType = VkExternalMemoryImageCreateInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: handleTypes( handleTypes_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExternalMemoryImageCreateInfoNV( *reinterpret_cast<ExternalMemoryImageCreateInfoNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExternalMemoryImageCreateInfoNV & operator=( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExternalMemoryImageCreateInfoNV & operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleTypes = handleTypes_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkExternalMemoryImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExternalMemoryImageCreateInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkExternalMemoryImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExternalMemoryImageCreateInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, handleTypes );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ExternalMemoryImageCreateInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( handleTypes == rhs.handleTypes );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV>::value, "ExternalMemoryImageCreateInfoNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eExternalMemoryImageCreateInfoNV>
|
|
{
|
|
using Type = ExternalMemoryImageCreateInfoNV;
|
|
};
|
|
|
|
struct ExternalSemaphoreProperties
|
|
{
|
|
using NativeType = VkExternalSemaphoreProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalSemaphoreProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ), compatibleHandleTypes( compatibleHandleTypes_ ), externalSemaphoreFeatures( externalSemaphoreFeatures_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExternalSemaphoreProperties( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ExternalSemaphoreProperties( *reinterpret_cast<ExternalSemaphoreProperties const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ExternalSemaphoreProperties & operator=( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ExternalSemaphoreProperties & operator=( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkExternalSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkExternalSemaphoreProperties*>( this );
|
|
}
|
|
|
|
explicit operator VkExternalSemaphoreProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkExternalSemaphoreProperties*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, exportFromImportedHandleTypes, compatibleHandleTypes, externalSemaphoreFeatures );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ExternalSemaphoreProperties const & ) const = default;
|
|
#else
|
|
bool operator==( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
|
|
&& ( compatibleHandleTypes == rhs.compatibleHandleTypes )
|
|
&& ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalSemaphoreProperties;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties>::value, "ExternalSemaphoreProperties is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eExternalSemaphoreProperties>
|
|
{
|
|
using Type = ExternalSemaphoreProperties;
|
|
};
|
|
using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties;
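  // Usage sketch for ExternalSemaphoreProperties (illustrative only). It is filled in by
  // vkGetPhysicalDeviceExternalSemaphoreProperties / PhysicalDevice::getExternalSemaphoreProperties.
  // Assumes the default "vk" namespace and a valid vk::PhysicalDevice "physicalDevice":
  //
  //   vk::PhysicalDeviceExternalSemaphoreInfo externalSemaphoreInfo(
  //     vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd );
  //   vk::ExternalSemaphoreProperties externalSemaphoreProperties =
  //     physicalDevice.getExternalSemaphoreProperties( externalSemaphoreInfo );
  //   bool importable = static_cast<bool>(
  //     externalSemaphoreProperties.externalSemaphoreFeatures &
  //     vk::ExternalSemaphoreFeatureFlagBits::eImportable );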
|
|
|
|
struct FenceCreateInfo
|
|
{
|
|
using NativeType = VkFenceCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR FenceCreateInfo(VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR FenceCreateInfo( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FenceCreateInfo( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: FenceCreateInfo( *reinterpret_cast<FenceCreateInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
FenceCreateInfo & operator=( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FenceCreateInfo & operator=( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkFenceCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkFenceCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::FenceCreateFlags const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( FenceCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::FenceCreateFlags flags = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FenceCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FenceCreateInfo>::value, "FenceCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eFenceCreateInfo>
|
|
{
|
|
using Type = FenceCreateInfo;
|
|
};
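  // Usage sketch for FenceCreateInfo (illustrative only). The only core flag is eSignaled, which
  // creates the fence already in the signaled state. Assumes a valid vk::Device "device" and
  // exception-based error handling (the vulkan.hpp default):
  //
  //   vk::Fence fence = device.createFence( vk::FenceCreateInfo( vk::FenceCreateFlagBits::eSignaled ) );
  //   ...
  //   device.destroyFence( fence );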
|
|
|
|
struct FenceGetFdInfoKHR
|
|
{
|
|
using NativeType = VkFenceGetFdInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetFdInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
|
|
: fence( fence_ ), handleType( handleType_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: FenceGetFdInfoKHR( *reinterpret_cast<FenceGetFdInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
FenceGetFdInfoKHR & operator=( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FenceGetFdInfoKHR & operator=( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fence = fence_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkFenceGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkFenceGetFdInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkFenceGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkFenceGetFdInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Fence const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, fence, handleType );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( FenceGetFdInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( fence == rhs.fence )
|
|
&& ( handleType == rhs.handleType );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetFdInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Fence fence = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR>::value, "FenceGetFdInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eFenceGetFdInfoKHR>
|
|
{
|
|
using Type = FenceGetFdInfoKHR;
|
|
};
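  // Usage sketch for FenceGetFdInfoKHR (illustrative only, VK_KHR_external_fence_fd). The fence must
  // have been created with an ExportFenceCreateInfo listing the requested handle type, and ownership
  // of the returned file descriptor passes to the caller. Assumes a valid vk::Device "device" and
  // vk::Fence "fence":
  //
  //   int fd = device.getFenceFdKHR(
  //     vk::FenceGetFdInfoKHR( fence, vk::ExternalFenceHandleTypeFlagBits::eSyncFd ) );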
|
|
|
|
#if defined( VK_USE_PLATFORM_WIN32_KHR )
|
|
struct FenceGetWin32HandleInfoKHR
|
|
{
|
|
using NativeType = VkFenceGetWin32HandleInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetWin32HandleInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
|
|
: fence( fence_ ), handleType( handleType_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: FenceGetWin32HandleInfoKHR( *reinterpret_cast<FenceGetWin32HandleInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
FenceGetWin32HandleInfoKHR & operator=( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FenceGetWin32HandleInfoKHR & operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fence = fence_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkFenceGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkFenceGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkFenceGetWin32HandleInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Fence const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, fence, handleType );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( FenceGetWin32HandleInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( fence == rhs.fence )
|
|
&& ( handleType == rhs.handleType );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Fence fence = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR>::value, "FenceGetWin32HandleInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eFenceGetWin32HandleInfoKHR>
|
|
{
|
|
using Type = FenceGetWin32HandleInfoKHR;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
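  // Usage sketch for FenceGetWin32HandleInfoKHR (illustrative only, VK_KHR_external_fence_win32;
  // only available on builds with VK_USE_PLATFORM_WIN32_KHR). As with the fd path, the fence must
  // have been created exportable for the requested handle type:
  //
  //   HANDLE handle = device.getFenceWin32HandleKHR(
  //     vk::FenceGetWin32HandleInfoKHR( fence, vk::ExternalFenceHandleTypeFlagBits::eOpaqueWin32 ) );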
|
|
|
|
struct FilterCubicImageViewImageFormatPropertiesEXT
|
|
{
|
|
using NativeType = VkFilterCubicImageViewImageFormatPropertiesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 filterCubic_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: filterCubic( filterCubic_ ), filterCubicMinmax( filterCubicMinmax_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: FilterCubicImageViewImageFormatPropertiesEXT( *reinterpret_cast<FilterCubicImageViewImageFormatPropertiesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
FilterCubicImageViewImageFormatPropertiesEXT & operator=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FilterCubicImageViewImageFormatPropertiesEXT & operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkFilterCubicImageViewImageFormatPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkFilterCubicImageViewImageFormatPropertiesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkFilterCubicImageViewImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkFilterCubicImageViewImageFormatPropertiesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, filterCubic, filterCubicMinmax );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( FilterCubicImageViewImageFormatPropertiesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( filterCubic == rhs.filterCubic )
|
|
&& ( filterCubicMinmax == rhs.filterCubicMinmax );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 filterCubic = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT ) == sizeof( VkFilterCubicImageViewImageFormatPropertiesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT>::value, "FilterCubicImageViewImageFormatPropertiesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eFilterCubicImageViewImageFormatPropertiesEXT>
|
|
{
|
|
using Type = FilterCubicImageViewImageFormatPropertiesEXT;
|
|
};
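  // Usage sketch for FilterCubicImageViewImageFormatPropertiesEXT (illustrative only, VK_EXT_filter_cubic).
  // It is returned through the pNext chain of ImageFormatProperties2 when a
  // PhysicalDeviceImageViewImageFormatInfoEXT is chained into the query. "physicalDevice" and a
  // filled-in vk::PhysicalDeviceImageFormatInfo2 "formatInfo" are assumed:
  //
  //   vk::PhysicalDeviceImageViewImageFormatInfoEXT viewInfo( vk::ImageViewType::e2D );
  //   formatInfo.pNext = &viewInfo;
  //   auto chain = physicalDevice.getImageFormatProperties2<vk::ImageFormatProperties2,
  //                                                         vk::FilterCubicImageViewImageFormatPropertiesEXT>( formatInfo );
  //   bool supportsCubic = static_cast<bool>( chain.get<vk::FilterCubicImageViewImageFormatPropertiesEXT>().filterCubic );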
|
|
|
|
struct FormatProperties
|
|
{
|
|
using NativeType = VkFormatProperties;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR FormatProperties(VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: linearTilingFeatures( linearTilingFeatures_ ), optimalTilingFeatures( optimalTilingFeatures_ ), bufferFeatures( bufferFeatures_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR FormatProperties( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FormatProperties( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: FormatProperties( *reinterpret_cast<FormatProperties const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
FormatProperties & operator=( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FormatProperties & operator=( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkFormatProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkFormatProperties*>( this );
|
|
}
|
|
|
|
explicit operator VkFormatProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkFormatProperties*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( linearTilingFeatures, optimalTilingFeatures, bufferFeatures );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( FormatProperties const & ) const = default;
|
|
#else
|
|
bool operator==( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( linearTilingFeatures == rhs.linearTilingFeatures )
|
|
&& ( optimalTilingFeatures == rhs.optimalTilingFeatures )
|
|
&& ( bufferFeatures == rhs.bufferFeatures );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures = {};
|
|
VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures = {};
|
|
VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FormatProperties ) == sizeof( VkFormatProperties ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FormatProperties>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FormatProperties>::value, "FormatProperties is not nothrow_move_constructible!" );
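  // Usage sketch for FormatProperties (illustrative only). The per-format feature flags come from
  // vkGetPhysicalDeviceFormatProperties / PhysicalDevice::getFormatProperties. Assumes a valid
  // vk::PhysicalDevice "physicalDevice":
  //
  //   vk::FormatProperties formatProperties = physicalDevice.getFormatProperties( vk::Format::eD32Sfloat );
  //   bool usableAsDepthAttachment = static_cast<bool>(
  //     formatProperties.optimalTilingFeatures & vk::FormatFeatureFlagBits::eDepthStencilAttachment );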
|
|
|
|
struct FormatProperties2
|
|
{
|
|
using NativeType = VkFormatProperties2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR FormatProperties2(VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: formatProperties( formatProperties_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR FormatProperties2( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FormatProperties2( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: FormatProperties2( *reinterpret_cast<FormatProperties2 const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
FormatProperties2 & operator=( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FormatProperties2 & operator=( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkFormatProperties2*>( this );
|
|
}
|
|
|
|
explicit operator VkFormatProperties2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkFormatProperties2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::FormatProperties const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, formatProperties );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( FormatProperties2 const & ) const = default;
|
|
#else
|
|
bool operator==( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( formatProperties == rhs.formatProperties );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties2;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::FormatProperties formatProperties = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FormatProperties2 ) == sizeof( VkFormatProperties2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FormatProperties2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FormatProperties2>::value, "FormatProperties2 is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eFormatProperties2>
|
|
{
|
|
using Type = FormatProperties2;
|
|
};
|
|
using FormatProperties2KHR = FormatProperties2;
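  // Illustrative usage sketch (application-side code, kept as a comment): FormatProperties2 is the
  // output of the extensible format query on PhysicalDevice. 'physicalDevice' is assumed to be a
  // valid handle obtained by the application; the format and flag tested are example values only.
  //
  //   VULKAN_HPP_NAMESPACE::FormatProperties2 props2 =
  //     physicalDevice.getFormatProperties2( VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm );
  //   bool sampledLinear = static_cast<bool>( props2.formatProperties.linearTilingFeatures &
  //                                           VULKAN_HPP_NAMESPACE::FormatFeatureFlagBits::eSampledImage );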
|
|
|
|
struct FormatProperties3
|
|
{
|
|
using NativeType = VkFormatProperties3;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties3;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR FormatProperties3(VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 linearTilingFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 optimalTilingFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 bufferFeatures_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: linearTilingFeatures( linearTilingFeatures_ ), optimalTilingFeatures( optimalTilingFeatures_ ), bufferFeatures( bufferFeatures_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR FormatProperties3( FormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FormatProperties3( VkFormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: FormatProperties3( *reinterpret_cast<FormatProperties3 const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
FormatProperties3 & operator=( FormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FormatProperties3 & operator=( VkFormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties3 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkFormatProperties3 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkFormatProperties3*>( this );
|
|
}
|
|
|
|
explicit operator VkFormatProperties3 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkFormatProperties3*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, linearTilingFeatures, optimalTilingFeatures, bufferFeatures );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( FormatProperties3 const & ) const = default;
|
|
#else
|
|
bool operator==( FormatProperties3 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( linearTilingFeatures == rhs.linearTilingFeatures )
|
|
&& ( optimalTilingFeatures == rhs.optimalTilingFeatures )
|
|
&& ( bufferFeatures == rhs.bufferFeatures );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( FormatProperties3 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties3;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 linearTilingFeatures = {};
|
|
VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 optimalTilingFeatures = {};
|
|
VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 bufferFeatures = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FormatProperties3 ) == sizeof( VkFormatProperties3 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FormatProperties3>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FormatProperties3>::value, "FormatProperties3 is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eFormatProperties3>
|
|
{
|
|
using Type = FormatProperties3;
|
|
};
|
|
using FormatProperties3KHR = FormatProperties3;
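  // Illustrative usage sketch (comment only): FormatProperties3 carries the 64-bit
  // FormatFeatureFlags2 and is obtained by chaining it behind FormatProperties2. The
  // structure-chain overload of getFormatProperties2 used here is assumed to be declared with the
  // handle types elsewhere in this header set.
  //
  //   auto chain = physicalDevice.getFormatProperties2< VULKAN_HPP_NAMESPACE::FormatProperties2,
  //                                                     VULKAN_HPP_NAMESPACE::FormatProperties3 >(
  //     VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm );
  //   VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 linearFeatures =
  //     chain.get<VULKAN_HPP_NAMESPACE::FormatProperties3>().linearTilingFeatures;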
|
|
|
|
struct FragmentShadingRateAttachmentInfoKHR
|
|
{
|
|
using NativeType = VkFragmentShadingRateAttachmentInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFragmentShadingRateAttachmentInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR(const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ = {}, VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: pFragmentShadingRateAttachment( pFragmentShadingRateAttachment_ ), shadingRateAttachmentTexelSize( shadingRateAttachmentTexelSize_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FragmentShadingRateAttachmentInfoKHR( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: FragmentShadingRateAttachmentInfoKHR( *reinterpret_cast<FragmentShadingRateAttachmentInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
FragmentShadingRateAttachmentInfoKHR & operator=( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FragmentShadingRateAttachmentInfoKHR & operator=( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & setPFragmentShadingRateAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pFragmentShadingRateAttachment = pFragmentShadingRateAttachment_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & setShadingRateAttachmentTexelSize( VULKAN_HPP_NAMESPACE::Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkFragmentShadingRateAttachmentInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkFragmentShadingRateAttachmentInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pFragmentShadingRateAttachment, shadingRateAttachmentTexelSize );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( FragmentShadingRateAttachmentInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( pFragmentShadingRateAttachment == rhs.pFragmentShadingRateAttachment )
|
|
&& ( shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFragmentShadingRateAttachmentInfoKHR;
|
|
const void * pNext = {};
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR ) == sizeof( VkFragmentShadingRateAttachmentInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR>::value, "FragmentShadingRateAttachmentInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eFragmentShadingRateAttachmentInfoKHR>
|
|
{
|
|
using Type = FragmentShadingRateAttachmentInfoKHR;
|
|
};
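  // Illustrative usage sketch (comment only): FragmentShadingRateAttachmentInfoKHR is chained into
  // the pNext of a SubpassDescription2 to attach a shading-rate image to that subpass
  // (VK_KHR_fragment_shading_rate). Attachment index 2 and the 16x16 texel size are example values.
  //
  //   VULKAN_HPP_NAMESPACE::AttachmentReference2 shadingRateRef(
  //     2, VULKAN_HPP_NAMESPACE::ImageLayout::eFragmentShadingRateAttachmentOptimalKHR );
  //   VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR shadingRateInfo(
  //     &shadingRateRef, VULKAN_HPP_NAMESPACE::Extent2D( 16, 16 ) );
  //   VULKAN_HPP_NAMESPACE::SubpassDescription2 subpass;
  //   subpass.setPNext( &shadingRateInfo );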
|
|
|
|
struct FramebufferAttachmentImageInfo
|
|
{
|
|
using NativeType = VkFramebufferAttachmentImageInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentImageInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo(VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t layerCount_ = {}, uint32_t viewFormatCount_ = {}, const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), usage( usage_ ), width( width_ ), height( height_ ), layerCount( layerCount_ ), viewFormatCount( viewFormatCount_ ), pViewFormats( pViewFormats_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FramebufferAttachmentImageInfo( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: FramebufferAttachmentImageInfo( *reinterpret_cast<FramebufferAttachmentImageInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, uint32_t width_, uint32_t height_, uint32_t layerCount_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ )
|
|
: flags( flags_ ), usage( usage_ ), width( width_ ), height( height_ ), layerCount( layerCount_ ), viewFormatCount( static_cast<uint32_t>( viewFormats_.size() ) ), pViewFormats( viewFormats_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
FramebufferAttachmentImageInfo & operator=( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FramebufferAttachmentImageInfo & operator=( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
usage = usage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
width = width_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
height = height_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
layerCount = layerCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewFormatCount = viewFormatCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pViewFormats = pViewFormats_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
FramebufferAttachmentImageInfo & setViewFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewFormatCount = static_cast<uint32_t>( viewFormats_.size() );
|
|
pViewFormats = viewFormats_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkFramebufferAttachmentImageInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkFramebufferAttachmentImageInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkFramebufferAttachmentImageInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkFramebufferAttachmentImageInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageCreateFlags const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Format * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, usage, width, height, layerCount, viewFormatCount, pViewFormats );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( FramebufferAttachmentImageInfo const & ) const = default;
|
|
#else
|
|
bool operator==( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( usage == rhs.usage )
|
|
&& ( width == rhs.width )
|
|
&& ( height == rhs.height )
|
|
&& ( layerCount == rhs.layerCount )
|
|
&& ( viewFormatCount == rhs.viewFormatCount )
|
|
&& ( pViewFormats == rhs.pViewFormats );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentImageInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
|
|
uint32_t width = {};
|
|
uint32_t height = {};
|
|
uint32_t layerCount = {};
|
|
uint32_t viewFormatCount = {};
|
|
const VULKAN_HPP_NAMESPACE::Format * pViewFormats = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo ) == sizeof( VkFramebufferAttachmentImageInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo>::value, "FramebufferAttachmentImageInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eFramebufferAttachmentImageInfo>
|
|
{
|
|
using Type = FramebufferAttachmentImageInfo;
|
|
};
|
|
using FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo;
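  // Illustrative usage sketch (comment only): for an imageless framebuffer, one
  // FramebufferAttachmentImageInfo describes each attachment that will later be bound when the
  // render pass begins. The usage flags, extent and view format shown are example values only.
  //
  //   VULKAN_HPP_NAMESPACE::Format colorFormat = VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm;
  //   VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo attachmentInfo(
  //     {},                                                          // flags
  //     VULKAN_HPP_NAMESPACE::ImageUsageFlagBits::eColorAttachment,  // usage
  //     1920, 1080, 1,                                               // width, height, layerCount
  //     1, &colorFormat );                                           // viewFormatCount, pViewFormats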
|
|
|
|
struct FramebufferAttachmentsCreateInfo
|
|
{
|
|
using NativeType = VkFramebufferAttachmentsCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentsCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo(uint32_t attachmentImageInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: attachmentImageInfoCount( attachmentImageInfoCount_ ), pAttachmentImageInfos( pAttachmentImageInfos_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FramebufferAttachmentsCreateInfo( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: FramebufferAttachmentsCreateInfo( *reinterpret_cast<FramebufferAttachmentsCreateInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
FramebufferAttachmentsCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo> const & attachmentImageInfos_ )
|
|
: attachmentImageInfoCount( static_cast<uint32_t>( attachmentImageInfos_.size() ) ), pAttachmentImageInfos( attachmentImageInfos_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
FramebufferAttachmentsCreateInfo & operator=( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FramebufferAttachmentsCreateInfo & operator=( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentImageInfoCount = attachmentImageInfoCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setPAttachmentImageInfos( const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAttachmentImageInfos = pAttachmentImageInfos_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
FramebufferAttachmentsCreateInfo & setAttachmentImageInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo> const & attachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentImageInfoCount = static_cast<uint32_t>( attachmentImageInfos_.size() );
|
|
pAttachmentImageInfos = attachmentImageInfos_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkFramebufferAttachmentsCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkFramebufferAttachmentsCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkFramebufferAttachmentsCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkFramebufferAttachmentsCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, attachmentImageInfoCount, pAttachmentImageInfos );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( FramebufferAttachmentsCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( attachmentImageInfoCount == rhs.attachmentImageInfoCount )
|
|
&& ( pAttachmentImageInfos == rhs.pAttachmentImageInfos );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentsCreateInfo;
|
|
const void * pNext = {};
|
|
uint32_t attachmentImageInfoCount = {};
|
|
const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo ) == sizeof( VkFramebufferAttachmentsCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo>::value, "FramebufferAttachmentsCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eFramebufferAttachmentsCreateInfo>
|
|
{
|
|
using Type = FramebufferAttachmentsCreateInfo;
|
|
};
|
|
using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo;
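  // Illustrative usage sketch (comment only): FramebufferAttachmentsCreateInfo wraps the
  // per-attachment infos and is chained into FramebufferCreateInfo::pNext when the eImageless
  // create flag is used. 'attachmentInfo' refers to the sketch above.
  //
  //   VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo attachmentsInfo( 1, &attachmentInfo );
  //   VULKAN_HPP_NAMESPACE::FramebufferCreateInfo framebufferInfo;
  //   framebufferInfo.setFlags( VULKAN_HPP_NAMESPACE::FramebufferCreateFlagBits::eImageless )
  //                  .setPNext( &attachmentsInfo );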
|
|
|
|
struct FramebufferCreateInfo
|
|
{
|
|
using NativeType = VkFramebufferCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR FramebufferCreateInfo(VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {}, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t layers_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), renderPass( renderPass_ ), attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ ), width( width_ ), height( height_ ), layers( layers_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: FramebufferCreateInfo( *reinterpret_cast<FramebufferCreateInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_, VULKAN_HPP_NAMESPACE::RenderPass renderPass_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t layers_ = {} )
|
|
: flags( flags_ ), renderPass( renderPass_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() ), width( width_ ), height( height_ ), layers( layers_ )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
FramebufferCreateInfo & operator=( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FramebufferCreateInfo & operator=( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
renderPass = renderPass_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentCount = attachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAttachments = pAttachments_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
FramebufferCreateInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentCount = static_cast<uint32_t>( attachments_.size() );
|
|
pAttachments = attachments_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
width = width_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
height = height_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setLayers( uint32_t layers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
layers = layers_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkFramebufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkFramebufferCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkFramebufferCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkFramebufferCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::FramebufferCreateFlags const &, VULKAN_HPP_NAMESPACE::RenderPass const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ImageView * const &, uint32_t const &, uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, renderPass, attachmentCount, pAttachments, width, height, layers );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( FramebufferCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( renderPass == rhs.renderPass )
|
|
&& ( attachmentCount == rhs.attachmentCount )
|
|
&& ( pAttachments == rhs.pAttachments )
|
|
&& ( width == rhs.width )
|
|
&& ( height == rhs.height )
|
|
&& ( layers == rhs.layers );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
|
|
uint32_t attachmentCount = {};
|
|
const VULKAN_HPP_NAMESPACE::ImageView * pAttachments = {};
|
|
uint32_t width = {};
|
|
uint32_t height = {};
|
|
uint32_t layers = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FramebufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FramebufferCreateInfo>::value, "FramebufferCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eFramebufferCreateInfo>
|
|
{
|
|
using Type = FramebufferCreateInfo;
|
|
};
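  // Illustrative usage sketch (comment only): creating a framebuffer from the structure above,
  // assuming a valid vk::Device 'device', a compatible vk::RenderPass 'renderPass' and an
  // application-created vk::ImageView 'colorView', with the default exception configuration.
  //
  //   VULKAN_HPP_NAMESPACE::FramebufferCreateInfo framebufferInfo(
  //     {}, renderPass, 1, &colorView, 1920, 1080, 1 );
  //   VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = device.createFramebuffer( framebufferInfo );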
|
|
|
|
struct FramebufferMixedSamplesCombinationNV
|
|
{
|
|
using NativeType = VkFramebufferMixedSamplesCombinationNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferMixedSamplesCombinationNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV(VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: coverageReductionMode( coverageReductionMode_ ), rasterizationSamples( rasterizationSamples_ ), depthStencilSamples( depthStencilSamples_ ), colorSamples( colorSamples_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FramebufferMixedSamplesCombinationNV( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: FramebufferMixedSamplesCombinationNV( *reinterpret_cast<FramebufferMixedSamplesCombinationNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
FramebufferMixedSamplesCombinationNV & operator=( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
FramebufferMixedSamplesCombinationNV & operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkFramebufferMixedSamplesCombinationNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkFramebufferMixedSamplesCombinationNV*>( this );
|
|
}
|
|
|
|
explicit operator VkFramebufferMixedSamplesCombinationNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkFramebufferMixedSamplesCombinationNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::CoverageReductionModeNV const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, coverageReductionMode, rasterizationSamples, depthStencilSamples, colorSamples );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( FramebufferMixedSamplesCombinationNV const & ) const = default;
|
|
#else
|
|
bool operator==( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( coverageReductionMode == rhs.coverageReductionMode )
|
|
&& ( rasterizationSamples == rhs.rasterizationSamples )
|
|
&& ( depthStencilSamples == rhs.depthStencilSamples )
|
|
&& ( colorSamples == rhs.colorSamples );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferMixedSamplesCombinationNV;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge;
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV ) == sizeof( VkFramebufferMixedSamplesCombinationNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV>::value, "FramebufferMixedSamplesCombinationNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eFramebufferMixedSamplesCombinationNV>
|
|
{
|
|
using Type = FramebufferMixedSamplesCombinationNV;
|
|
};
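  // Illustrative usage sketch (comment only): FramebufferMixedSamplesCombinationNV is a read-only
  // property structure returned when enumerating the coverage-reduction combinations a device
  // supports (VK_NV_coverage_reduction_mode). The enumeration call shown is assumed to be declared
  // with the handle types elsewhere in this header set.
  //
  //   auto combinations = physicalDevice.getSupportedFramebufferMixedSamplesCombinationsNV();
  //   for ( auto const & c : combinations )
  //   {
  //     // inspect c.coverageReductionMode, c.rasterizationSamples, c.depthStencilSamples, c.colorSamples
  //   }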
|
|
|
|
struct IndirectCommandsStreamNV
|
|
{
|
|
using NativeType = VkIndirectCommandsStreamNV;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: buffer( buffer_ ), offset( offset_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
IndirectCommandsStreamNV( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: IndirectCommandsStreamNV( *reinterpret_cast<IndirectCommandsStreamNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
IndirectCommandsStreamNV & operator=( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
IndirectCommandsStreamNV & operator=( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
buffer = buffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
offset = offset_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkIndirectCommandsStreamNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkIndirectCommandsStreamNV*>( this );
|
|
}
|
|
|
|
explicit operator VkIndirectCommandsStreamNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkIndirectCommandsStreamNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( buffer, offset );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( IndirectCommandsStreamNV const & ) const = default;
|
|
#else
|
|
bool operator==( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( buffer == rhs.buffer )
|
|
&& ( offset == rhs.offset );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV ) == sizeof( VkIndirectCommandsStreamNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV>::value, "IndirectCommandsStreamNV is not nothrow_move_constructible!" );
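  // Illustrative usage sketch (comment only): each IndirectCommandsStreamNV names one input buffer
  // of a device-generated-commands layout, and the offset selects where that stream's tokens start.
  // 'tokenBuffer' is a hypothetical application-created vk::Buffer.
  //
  //   VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV stream( tokenBuffer, 0 );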
|
|
|
|
struct GeneratedCommandsInfoNV
|
|
{
|
|
using NativeType = VkGeneratedCommandsInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, uint32_t streamCount_ = {}, const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_ = {}, uint32_t sequencesCount_ = {}, VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: pipelineBindPoint( pipelineBindPoint_ ), pipeline( pipeline_ ), indirectCommandsLayout( indirectCommandsLayout_ ), streamCount( streamCount_ ), pStreams( pStreams_ ), sequencesCount( sequencesCount_ ), preprocessBuffer( preprocessBuffer_ ), preprocessOffset( preprocessOffset_ ), preprocessSize( preprocessSize_ ), sequencesCountBuffer( sequencesCountBuffer_ ), sequencesCountOffset( sequencesCountOffset_ ), sequencesIndexBuffer( sequencesIndexBuffer_ ), sequencesIndexOffset( sequencesIndexOffset_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
GeneratedCommandsInfoNV( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: GeneratedCommandsInfoNV( *reinterpret_cast<GeneratedCommandsInfoNV const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
GeneratedCommandsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::Pipeline pipeline_, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV> const & streams_, uint32_t sequencesCount_ = {}, VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {} )
|
|
: pipelineBindPoint( pipelineBindPoint_ ), pipeline( pipeline_ ), indirectCommandsLayout( indirectCommandsLayout_ ), streamCount( static_cast<uint32_t>( streams_.size() ) ), pStreams( streams_.data() ), sequencesCount( sequencesCount_ ), preprocessBuffer( preprocessBuffer_ ), preprocessOffset( preprocessOffset_ ), preprocessSize( preprocessSize_ ), sequencesCountBuffer( sequencesCountBuffer_ ), sequencesCountOffset( sequencesCountOffset_ ), sequencesIndexBuffer( sequencesIndexBuffer_ ), sequencesIndexOffset( sequencesIndexOffset_ )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
GeneratedCommandsInfoNV & operator=( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
GeneratedCommandsInfoNV & operator=( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineBindPoint = pipelineBindPoint_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipeline = pipeline_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
indirectCommandsLayout = indirectCommandsLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
streamCount = streamCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPStreams( const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pStreams = pStreams_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
GeneratedCommandsInfoNV & setStreams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV> const & streams_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
streamCount = static_cast<uint32_t>( streams_.size() );
|
|
pStreams = streams_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCount( uint32_t sequencesCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sequencesCount = sequencesCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessBuffer( VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
preprocessBuffer = preprocessBuffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessOffset( VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
preprocessOffset = preprocessOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessSize( VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
preprocessSize = preprocessSize_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sequencesCountBuffer = sequencesCountBuffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sequencesCountOffset = sequencesCountOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sequencesIndexBuffer = sequencesIndexBuffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sequencesIndexOffset = sequencesIndexOffset_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkGeneratedCommandsInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkGeneratedCommandsInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkGeneratedCommandsInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkGeneratedCommandsInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineBindPoint const &, VULKAN_HPP_NAMESPACE::Pipeline const &, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pipelineBindPoint, pipeline, indirectCommandsLayout, streamCount, pStreams, sequencesCount, preprocessBuffer, preprocessOffset, preprocessSize, sequencesCountBuffer, sequencesCountOffset, sequencesIndexBuffer, sequencesIndexOffset );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( GeneratedCommandsInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( pipelineBindPoint == rhs.pipelineBindPoint )
|
|
&& ( pipeline == rhs.pipeline )
|
|
&& ( indirectCommandsLayout == rhs.indirectCommandsLayout )
|
|
&& ( streamCount == rhs.streamCount )
|
|
&& ( pStreams == rhs.pStreams )
|
|
&& ( sequencesCount == rhs.sequencesCount )
|
|
&& ( preprocessBuffer == rhs.preprocessBuffer )
|
|
&& ( preprocessOffset == rhs.preprocessOffset )
|
|
&& ( preprocessSize == rhs.preprocessSize )
|
|
&& ( sequencesCountBuffer == rhs.sequencesCountBuffer )
|
|
&& ( sequencesCountOffset == rhs.sequencesCountOffset )
|
|
&& ( sequencesIndexBuffer == rhs.sequencesIndexBuffer )
|
|
&& ( sequencesIndexOffset == rhs.sequencesIndexOffset );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
|
|
VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
|
|
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {};
|
|
uint32_t streamCount = {};
|
|
const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams = {};
|
|
uint32_t sequencesCount = {};
|
|
VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize = {};
|
|
VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {};
|
|
VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV ) == sizeof( VkGeneratedCommandsInfoNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV>::value, "GeneratedCommandsInfoNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eGeneratedCommandsInfoNV>
|
|
{
|
|
using Type = GeneratedCommandsInfoNV;
|
|
};
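  // Illustrative usage sketch (comment only): GeneratedCommandsInfoNV bundles everything the
  // generated-commands execution needs (VK_NV_device_generated_commands). The handles 'pipeline',
  // 'indirectCommandsLayout', 'preprocessBuffer' and the counts are assumed to come from the
  // application; 'stream' is the IndirectCommandsStreamNV from the sketch above, passed through the
  // enhanced-mode constructor that takes an ArrayProxy of streams.
  //
  //   VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV generatedInfo(
  //     VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, pipeline, indirectCommandsLayout,
  //     stream, sequenceCount, preprocessBuffer, 0, preprocessSize );
  //   commandBuffer.executeGeneratedCommandsNV( VK_FALSE, generatedInfo );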
|
|
|
|
struct GeneratedCommandsMemoryRequirementsInfoNV
|
|
{
|
|
using NativeType = VkGeneratedCommandsMemoryRequirementsInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, uint32_t maxSequencesCount_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: pipelineBindPoint( pipelineBindPoint_ ), pipeline( pipeline_ ), indirectCommandsLayout( indirectCommandsLayout_ ), maxSequencesCount( maxSequencesCount_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
GeneratedCommandsMemoryRequirementsInfoNV( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: GeneratedCommandsMemoryRequirementsInfoNV( *reinterpret_cast<GeneratedCommandsMemoryRequirementsInfoNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
GeneratedCommandsMemoryRequirementsInfoNV & operator=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
GeneratedCommandsMemoryRequirementsInfoNV & operator=( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineBindPoint = pipelineBindPoint_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipeline = pipeline_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
indirectCommandsLayout = indirectCommandsLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxSequencesCount = maxSequencesCount_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkGeneratedCommandsMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkGeneratedCommandsMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkGeneratedCommandsMemoryRequirementsInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineBindPoint const &, VULKAN_HPP_NAMESPACE::Pipeline const &, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pipelineBindPoint, pipeline, indirectCommandsLayout, maxSequencesCount );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( GeneratedCommandsMemoryRequirementsInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( pipelineBindPoint == rhs.pipelineBindPoint )
|
|
&& ( pipeline == rhs.pipeline )
|
|
&& ( indirectCommandsLayout == rhs.indirectCommandsLayout )
|
|
&& ( maxSequencesCount == rhs.maxSequencesCount );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
|
|
VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
|
|
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {};
|
|
uint32_t maxSequencesCount = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV ) == sizeof( VkGeneratedCommandsMemoryRequirementsInfoNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV>::value, "GeneratedCommandsMemoryRequirementsInfoNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eGeneratedCommandsMemoryRequirementsInfoNV>
|
|
{
|
|
using Type = GeneratedCommandsMemoryRequirementsInfoNV;
|
|
};
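  // Illustrative usage sketch (comment only): the structure above feeds the memory-requirements
  // query for the preprocess buffer used by device-generated commands. The device-level query shown
  // is assumed to be declared with the handle types elsewhere in this header set; 'maxSequences' is
  // an application-chosen limit.
  //
  //   VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV requirementsInfo(
  //     VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, pipeline, indirectCommandsLayout, maxSequences );
  //   VULKAN_HPP_NAMESPACE::MemoryRequirements2 requirements =
  //     device.getGeneratedCommandsMemoryRequirementsNV( requirementsInfo );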
|
|
|
|
struct VertexInputBindingDescription
|
|
{
|
|
using NativeType = VkVertexInputBindingDescription;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VertexInputBindingDescription(uint32_t binding_ = {}, uint32_t stride_ = {}, VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex) VULKAN_HPP_NOEXCEPT
|
|
: binding( binding_ ), stride( stride_ ), inputRate( inputRate_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VertexInputBindingDescription( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VertexInputBindingDescription( *reinterpret_cast<VertexInputBindingDescription const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VertexInputBindingDescription & operator=( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VertexInputBindingDescription & operator=( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
binding = binding_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stride = stride_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
inputRate = inputRate_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVertexInputBindingDescription const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVertexInputBindingDescription*>( this );
|
|
}
|
|
|
|
explicit operator VkVertexInputBindingDescription &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVertexInputBindingDescription*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::VertexInputRate const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( binding, stride, inputRate );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VertexInputBindingDescription const & ) const = default;
|
|
#else
|
|
bool operator==( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( binding == rhs.binding )
|
|
&& ( stride == rhs.stride )
|
|
&& ( inputRate == rhs.inputRate );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t binding = {};
|
|
uint32_t stride = {};
|
|
VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription>::value, "VertexInputBindingDescription is not nothrow_move_constructible!" );
struct VertexInputAttributeDescription
|
|
{
|
|
using NativeType = VkVertexInputAttributeDescription;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription(uint32_t location_ = {}, uint32_t binding_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint32_t offset_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: location( location_ ), binding( binding_ ), format( format_ ), offset( offset_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VertexInputAttributeDescription( *reinterpret_cast<VertexInputAttributeDescription const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VertexInputAttributeDescription & operator=( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VertexInputAttributeDescription & operator=( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
location = location_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
binding = binding_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
format = format_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
offset = offset_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVertexInputAttributeDescription const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVertexInputAttributeDescription*>( this );
|
|
}
|
|
|
|
explicit operator VkVertexInputAttributeDescription &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVertexInputAttributeDescription*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Format const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( location, binding, format, offset );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VertexInputAttributeDescription const & ) const = default;
|
|
#else
|
|
bool operator==( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( location == rhs.location )
|
|
&& ( binding == rhs.binding )
|
|
&& ( format == rhs.format )
|
|
&& ( offset == rhs.offset );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t location = {};
|
|
uint32_t binding = {};
|
|
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
uint32_t offset = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription>::value, "VertexInputAttributeDescription is not nothrow_move_constructible!" );
struct PipelineVertexInputStateCreateInfo
|
|
{
|
|
using NativeType = VkPipelineVertexInputStateCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputStateCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ = {}, uint32_t vertexBindingDescriptionCount_ = {}, const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions_ = {}, uint32_t vertexAttributeDescriptionCount_ = {}, const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), vertexBindingDescriptionCount( vertexBindingDescriptionCount_ ), pVertexBindingDescriptions( pVertexBindingDescriptions_ ), vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ ), pVertexAttributeDescriptions( pVertexAttributeDescriptions_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineVertexInputStateCreateInfo( *reinterpret_cast<PipelineVertexInputStateCreateInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineVertexInputStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription> const & vertexBindingDescriptions_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription> const & vertexAttributeDescriptions_ = {} )
|
|
: flags( flags_ ), vertexBindingDescriptionCount( static_cast<uint32_t>( vertexBindingDescriptions_.size() ) ), pVertexBindingDescriptions( vertexBindingDescriptions_.data() ), vertexAttributeDescriptionCount( static_cast<uint32_t>( vertexAttributeDescriptions_.size() ) ), pVertexAttributeDescriptions( vertexAttributeDescriptions_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineVertexInputStateCreateInfo & operator=( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineVertexInputStateCreateInfo & operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
vertexBindingDescriptionCount = vertexBindingDescriptionCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setPVertexBindingDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pVertexBindingDescriptions = pVertexBindingDescriptions_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineVertexInputStateCreateInfo & setVertexBindingDescriptions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription> const & vertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
vertexBindingDescriptionCount = static_cast<uint32_t>( vertexBindingDescriptions_.size() );
|
|
pVertexBindingDescriptions = vertexBindingDescriptions_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setPVertexAttributeDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pVertexAttributeDescriptions = pVertexAttributeDescriptions_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription> const & vertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
vertexAttributeDescriptionCount = static_cast<uint32_t>( vertexAttributeDescriptions_.size() );
|
|
pVertexAttributeDescriptions = vertexAttributeDescriptions_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineVertexInputStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineVertexInputStateCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineVertexInputStateCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineVertexInputStateCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineVertexInputStateCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount )
|
|
&& ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions )
|
|
&& ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount )
|
|
&& ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags = {};
|
|
uint32_t vertexBindingDescriptionCount = {};
|
|
const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions = {};
|
|
uint32_t vertexAttributeDescriptionCount = {};
|
|
const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo>::value, "PipelineVertexInputStateCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineVertexInputStateCreateInfo>
|
|
{
|
|
using Type = PipelineVertexInputStateCreateInfo;
|
|
};
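  // Usage sketch (illustrative only): one interleaved vertex buffer carrying a vec3 position and a
  // vec2 texture coordinate, combined through the ArrayProxyNoTemporaries constructor above
  // (available unless VULKAN_HPP_DISABLE_ENHANCED_MODE is defined). Assumes the default vk
  // namespace; the attribute layout is a placeholder, not part of the generated header.
  //
  //   std::array<vk::VertexInputBindingDescription, 1> bindings = {
  //     vk::VertexInputBindingDescription( 0, 5 * sizeof( float ), vk::VertexInputRate::eVertex ) };
  //   std::array<vk::VertexInputAttributeDescription, 2> attributes = {
  //     vk::VertexInputAttributeDescription( 0, 0, vk::Format::eR32G32B32Sfloat, 0 ),
  //     vk::VertexInputAttributeDescription( 1, 0, vk::Format::eR32G32Sfloat, 3 * sizeof( float ) ) };
  //   vk::PipelineVertexInputStateCreateInfo vertexInputState( {}, bindings, attributes );
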
struct PipelineInputAssemblyStateCreateInfo
|
|
{
|
|
using NativeType = VkPipelineInputAssemblyStateCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInputAssemblyStateCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList, VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), topology( topology_ ), primitiveRestartEnable( primitiveRestartEnable_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineInputAssemblyStateCreateInfo( *reinterpret_cast<PipelineInputAssemblyStateCreateInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineInputAssemblyStateCreateInfo & operator=( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineInputAssemblyStateCreateInfo & operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
topology = topology_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
primitiveRestartEnable = primitiveRestartEnable_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineInputAssemblyStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineInputAssemblyStateCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineInputAssemblyStateCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineInputAssemblyStateCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags const &, VULKAN_HPP_NAMESPACE::PrimitiveTopology const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, topology, primitiveRestartEnable );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineInputAssemblyStateCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( topology == rhs.topology )
|
|
&& ( primitiveRestartEnable == rhs.primitiveRestartEnable );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::PrimitiveTopology topology = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList;
|
|
VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo>::value, "PipelineInputAssemblyStateCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineInputAssemblyStateCreateInfo>
|
|
{
|
|
using Type = PipelineInputAssemblyStateCreateInfo;
|
|
};
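  // Usage sketch (illustrative only): a triangle-list input assembly state with primitive restart
  // disabled, using the constructor shown above; the chosen topology is a placeholder.
  //
  //   vk::PipelineInputAssemblyStateCreateInfo inputAssemblyState( {}, vk::PrimitiveTopology::eTriangleList, VK_FALSE );
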
struct PipelineTessellationStateCreateInfo
|
|
{
|
|
using NativeType = VkPipelineTessellationStateCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationStateCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = {}, uint32_t patchControlPoints_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), patchControlPoints( patchControlPoints_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineTessellationStateCreateInfo( *reinterpret_cast<PipelineTessellationStateCreateInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineTessellationStateCreateInfo & operator=( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineTessellationStateCreateInfo & operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setPatchControlPoints( uint32_t patchControlPoints_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
patchControlPoints = patchControlPoints_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineTessellationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineTessellationStateCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineTessellationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineTessellationStateCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, patchControlPoints );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineTessellationStateCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( patchControlPoints == rhs.patchControlPoints );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationStateCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags = {};
|
|
uint32_t patchControlPoints = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo>::value, "PipelineTessellationStateCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineTessellationStateCreateInfo>
|
|
{
|
|
using Type = PipelineTessellationStateCreateInfo;
|
|
};
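  // Usage sketch (illustrative only): a tessellation state for patches of three control points,
  // only meaningful when the pipeline contains tessellation shader stages; the count is a placeholder.
  //
  //   vk::PipelineTessellationStateCreateInfo tessellationState( {}, 3 );
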
struct PipelineViewportStateCreateInfo
|
|
{
|
|
using NativeType = VkPipelineViewportStateCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportStateCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ = {}, uint32_t scissorCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), viewportCount( viewportCount_ ), pViewports( pViewports_ ), scissorCount( scissorCount_ ), pScissors( pScissors_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineViewportStateCreateInfo( *reinterpret_cast<PipelineViewportStateCreateInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors_ = {} )
|
|
: flags( flags_ ), viewportCount( static_cast<uint32_t>( viewports_.size() ) ), pViewports( viewports_.data() ), scissorCount( static_cast<uint32_t>( scissors_.size() ) ), pScissors( scissors_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineViewportStateCreateInfo & operator=( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineViewportStateCreateInfo & operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewportCount = viewportCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPViewports( const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pViewports = pViewports_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineViewportStateCreateInfo & setViewports( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewportCount = static_cast<uint32_t>( viewports_.size() );
|
|
pViewports = viewports_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setScissorCount( uint32_t scissorCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
scissorCount = scissorCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pScissors = pScissors_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineViewportStateCreateInfo & setScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
scissorCount = static_cast<uint32_t>( scissors_.size() );
|
|
pScissors = scissors_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineViewportStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineViewportStateCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineViewportStateCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineViewportStateCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Viewport * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Rect2D * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, viewportCount, pViewports, scissorCount, pScissors );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineViewportStateCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( viewportCount == rhs.viewportCount )
|
|
&& ( pViewports == rhs.pViewports )
|
|
&& ( scissorCount == rhs.scissorCount )
|
|
&& ( pScissors == rhs.pScissors );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportStateCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags = {};
|
|
uint32_t viewportCount = {};
|
|
const VULKAN_HPP_NAMESPACE::Viewport * pViewports = {};
|
|
uint32_t scissorCount = {};
|
|
const VULKAN_HPP_NAMESPACE::Rect2D * pScissors = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo>::value, "PipelineViewportStateCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineViewportStateCreateInfo>
|
|
{
|
|
using Type = PipelineViewportStateCreateInfo;
|
|
};
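  // Usage sketch (illustrative only): a single full-framebuffer viewport and scissor passed through
  // the ArrayProxyNoTemporaries constructor above; the 1280x720 extent is a placeholder that real
  // code would take from the swapchain.
  //
  //   vk::Viewport viewport( 0.0f, 0.0f, 1280.0f, 720.0f, 0.0f, 1.0f );
  //   vk::Rect2D   scissor( vk::Offset2D( 0, 0 ), vk::Extent2D( 1280, 720 ) );
  //   vk::PipelineViewportStateCreateInfo viewportState( {}, viewport, scissor );
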
struct PipelineRasterizationStateCreateInfo
|
|
{
|
|
using NativeType = VkPipelineRasterizationStateCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ = {}, VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ = VULKAN_HPP_NAMESPACE::PolygonMode::eFill, VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ = {}, VULKAN_HPP_NAMESPACE::FrontFace frontFace_ = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise, VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ = {}, float depthBiasConstantFactor_ = {}, float depthBiasClamp_ = {}, float depthBiasSlopeFactor_ = {}, float lineWidth_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), depthClampEnable( depthClampEnable_ ), rasterizerDiscardEnable( rasterizerDiscardEnable_ ), polygonMode( polygonMode_ ), cullMode( cullMode_ ), frontFace( frontFace_ ), depthBiasEnable( depthBiasEnable_ ), depthBiasConstantFactor( depthBiasConstantFactor_ ), depthBiasClamp( depthBiasClamp_ ), depthBiasSlopeFactor( depthBiasSlopeFactor_ ), lineWidth( lineWidth_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineRasterizationStateCreateInfo( *reinterpret_cast<PipelineRasterizationStateCreateInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineRasterizationStateCreateInfo & operator=( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineRasterizationStateCreateInfo & operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
depthClampEnable = depthClampEnable_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
rasterizerDiscardEnable = rasterizerDiscardEnable_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setPolygonMode( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
polygonMode = polygonMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
cullMode = cullMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
frontFace = frontFace_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
depthBiasEnable = depthBiasEnable_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
depthBiasConstantFactor = depthBiasConstantFactor_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasClamp( float depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
depthBiasClamp = depthBiasClamp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
depthBiasSlopeFactor = depthBiasSlopeFactor_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setLineWidth( float lineWidth_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
lineWidth = lineWidth_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineRasterizationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineRasterizationStateCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineRasterizationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineRasterizationStateCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::PolygonMode const &, VULKAN_HPP_NAMESPACE::CullModeFlags const &, VULKAN_HPP_NAMESPACE::FrontFace const &, VULKAN_HPP_NAMESPACE::Bool32 const &, float const &, float const &, float const &, float const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, depthClampEnable, rasterizerDiscardEnable, polygonMode, cullMode, frontFace, depthBiasEnable, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor, lineWidth );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineRasterizationStateCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( depthClampEnable == rhs.depthClampEnable )
|
|
&& ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable )
|
|
&& ( polygonMode == rhs.polygonMode )
|
|
&& ( cullMode == rhs.cullMode )
|
|
&& ( frontFace == rhs.frontFace )
|
|
&& ( depthBiasEnable == rhs.depthBiasEnable )
|
|
&& ( depthBiasConstantFactor == rhs.depthBiasConstantFactor )
|
|
&& ( depthBiasClamp == rhs.depthBiasClamp )
|
|
&& ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor )
|
|
&& ( lineWidth == rhs.lineWidth );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable = {};
|
|
VULKAN_HPP_NAMESPACE::PolygonMode polygonMode = VULKAN_HPP_NAMESPACE::PolygonMode::eFill;
|
|
VULKAN_HPP_NAMESPACE::CullModeFlags cullMode = {};
|
|
VULKAN_HPP_NAMESPACE::FrontFace frontFace = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise;
|
|
VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable = {};
|
|
float depthBiasConstantFactor = {};
|
|
float depthBiasClamp = {};
|
|
float depthBiasSlopeFactor = {};
|
|
float lineWidth = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo>::value, "PipelineRasterizationStateCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineRasterizationStateCreateInfo>
|
|
{
|
|
using Type = PipelineRasterizationStateCreateInfo;
|
|
};
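  // Usage sketch (illustrative only): solid fill, back-face culling, counter-clockwise front face,
  // depth bias left disabled and the mandatory 1.0 line width, built with the setters above; every
  // choice here is a placeholder.
  //
  //   vk::PipelineRasterizationStateCreateInfo rasterizationState;
  //   rasterizationState.setPolygonMode( vk::PolygonMode::eFill )
  //                     .setCullMode( vk::CullModeFlagBits::eBack )
  //                     .setFrontFace( vk::FrontFace::eCounterClockwise )
  //                     .setLineWidth( 1.0f );
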
struct PipelineMultisampleStateCreateInfo
|
|
{
|
|
using NativeType = VkPipelineMultisampleStateCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineMultisampleStateCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ = {}, float minSampleShading_ = {}, const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), rasterizationSamples( rasterizationSamples_ ), sampleShadingEnable( sampleShadingEnable_ ), minSampleShading( minSampleShading_ ), pSampleMask( pSampleMask_ ), alphaToCoverageEnable( alphaToCoverageEnable_ ), alphaToOneEnable( alphaToOneEnable_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineMultisampleStateCreateInfo( *reinterpret_cast<PipelineMultisampleStateCreateInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineMultisampleStateCreateInfo & operator=( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineMultisampleStateCreateInfo & operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
rasterizationSamples = rasterizationSamples_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setSampleShadingEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sampleShadingEnable = sampleShadingEnable_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setMinSampleShading( float minSampleShading_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
minSampleShading = minSampleShading_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setPSampleMask( const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSampleMask = pSampleMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setAlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
alphaToCoverageEnable = alphaToCoverageEnable_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setAlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
alphaToOneEnable = alphaToOneEnable_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineMultisampleStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineMultisampleStateCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineMultisampleStateCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineMultisampleStateCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &, VULKAN_HPP_NAMESPACE::Bool32 const &, float const &, const VULKAN_HPP_NAMESPACE::SampleMask * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, rasterizationSamples, sampleShadingEnable, minSampleShading, pSampleMask, alphaToCoverageEnable, alphaToOneEnable );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineMultisampleStateCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( rasterizationSamples == rhs.rasterizationSamples )
|
|
&& ( sampleShadingEnable == rhs.sampleShadingEnable )
|
|
&& ( minSampleShading == rhs.minSampleShading )
|
|
&& ( pSampleMask == rhs.pSampleMask )
|
|
&& ( alphaToCoverageEnable == rhs.alphaToCoverageEnable )
|
|
&& ( alphaToOneEnable == rhs.alphaToOneEnable );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
|
|
VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable = {};
|
|
float minSampleShading = {};
|
|
const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo>::value, "PipelineMultisampleStateCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineMultisampleStateCreateInfo>
|
|
{
|
|
using Type = PipelineMultisampleStateCreateInfo;
|
|
};
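  // Usage sketch (illustrative only): multisampling effectively disabled (one sample per pixel, no
  // sample shading), spelled out with the constructor above even though these match its defaults.
  //
  //   vk::PipelineMultisampleStateCreateInfo multisampleState( {}, vk::SampleCountFlagBits::e1, VK_FALSE, 1.0f );
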
struct StencilOpState
|
|
{
|
|
using NativeType = VkStencilOpState;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR StencilOpState(VULKAN_HPP_NAMESPACE::StencilOp failOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, VULKAN_HPP_NAMESPACE::StencilOp passOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, uint32_t compareMask_ = {}, uint32_t writeMask_ = {}, uint32_t reference_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: failOp( failOp_ ), passOp( passOp_ ), depthFailOp( depthFailOp_ ), compareOp( compareOp_ ), compareMask( compareMask_ ), writeMask( writeMask_ ), reference( reference_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR StencilOpState( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
StencilOpState( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: StencilOpState( *reinterpret_cast<StencilOpState const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
StencilOpState & operator=( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
StencilOpState & operator=( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StencilOpState const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 StencilOpState & setFailOp( VULKAN_HPP_NAMESPACE::StencilOp failOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
failOp = failOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 StencilOpState & setPassOp( VULKAN_HPP_NAMESPACE::StencilOp passOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
passOp = passOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 StencilOpState & setDepthFailOp( VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
depthFailOp = depthFailOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 StencilOpState & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
compareOp = compareOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 StencilOpState & setCompareMask( uint32_t compareMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
compareMask = compareMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 StencilOpState & setWriteMask( uint32_t writeMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
writeMask = writeMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 StencilOpState & setReference( uint32_t reference_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
reference = reference_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkStencilOpState const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkStencilOpState*>( this );
|
|
}
|
|
|
|
explicit operator VkStencilOpState &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkStencilOpState*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StencilOp const &, VULKAN_HPP_NAMESPACE::StencilOp const &, VULKAN_HPP_NAMESPACE::StencilOp const &, VULKAN_HPP_NAMESPACE::CompareOp const &, uint32_t const &, uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( failOp, passOp, depthFailOp, compareOp, compareMask, writeMask, reference );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( StencilOpState const & ) const = default;
|
|
#else
|
|
bool operator==( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( failOp == rhs.failOp )
|
|
&& ( passOp == rhs.passOp )
|
|
&& ( depthFailOp == rhs.depthFailOp )
|
|
&& ( compareOp == rhs.compareOp )
|
|
&& ( compareMask == rhs.compareMask )
|
|
&& ( writeMask == rhs.writeMask )
|
|
&& ( reference == rhs.reference );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StencilOp failOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
|
|
VULKAN_HPP_NAMESPACE::StencilOp passOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
|
|
VULKAN_HPP_NAMESPACE::StencilOp depthFailOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
|
|
VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
|
|
uint32_t compareMask = {};
|
|
uint32_t writeMask = {};
|
|
uint32_t reference = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StencilOpState ) == sizeof( VkStencilOpState ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::StencilOpState>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::StencilOpState>::value, "StencilOpState is not nothrow_move_constructible!" );
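  // Usage sketch (illustrative only): a stencil state that always passes and keeps the existing
  // stencil contents, e.g. as the front/back state of a depth-only pipeline; the masks and
  // reference value are placeholders.
  //
  //   vk::StencilOpState stencilState( vk::StencilOp::eKeep, vk::StencilOp::eKeep, vk::StencilOp::eKeep,
  //                                    vk::CompareOp::eAlways, 0xFF, 0xFF, 0 );
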
  struct PipelineDepthStencilStateCreateInfo
  {
    using NativeType = VkPipelineDepthStencilStateCreateInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDepthStencilStateCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ = {}, VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ = {}, VULKAN_HPP_NAMESPACE::StencilOpState front_ = {}, VULKAN_HPP_NAMESPACE::StencilOpState back_ = {}, float minDepthBounds_ = {}, float maxDepthBounds_ = {}) VULKAN_HPP_NOEXCEPT
      : flags( flags_ ), depthTestEnable( depthTestEnable_ ), depthWriteEnable( depthWriteEnable_ ), depthCompareOp( depthCompareOp_ ), depthBoundsTestEnable( depthBoundsTestEnable_ ), stencilTestEnable( stencilTestEnable_ ), front( front_ ), back( back_ ), minDepthBounds( minDepthBounds_ ), maxDepthBounds( maxDepthBounds_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineDepthStencilStateCreateInfo( *reinterpret_cast<PipelineDepthStencilStateCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineDepthStencilStateCreateInfo & operator=( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineDepthStencilStateCreateInfo & operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      depthTestEnable = depthTestEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      depthWriteEnable = depthWriteEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ ) VULKAN_HPP_NOEXCEPT
    {
      depthCompareOp = depthCompareOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      depthBoundsTestEnable = depthBoundsTestEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilTestEnable = stencilTestEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setFront( VULKAN_HPP_NAMESPACE::StencilOpState const & front_ ) VULKAN_HPP_NOEXCEPT
    {
      front = front_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setBack( VULKAN_HPP_NAMESPACE::StencilOpState const & back_ ) VULKAN_HPP_NOEXCEPT
    {
      back = back_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setMinDepthBounds( float minDepthBounds_ ) VULKAN_HPP_NOEXCEPT
    {
      minDepthBounds = minDepthBounds_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setMaxDepthBounds( float maxDepthBounds_ ) VULKAN_HPP_NOEXCEPT
    {
      maxDepthBounds = maxDepthBounds_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPipelineDepthStencilStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo*>( this );
    }

    explicit operator VkPipelineDepthStencilStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineDepthStencilStateCreateInfo*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::CompareOp const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::StencilOpState const &, VULKAN_HPP_NAMESPACE::StencilOpState const &, float const &, float const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, depthTestEnable, depthWriteEnable, depthCompareOp, depthBoundsTestEnable, stencilTestEnable, front, back, minDepthBounds, maxDepthBounds );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PipelineDepthStencilStateCreateInfo const & ) const = default;
#else
    bool operator==( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags )
          && ( depthTestEnable == rhs.depthTestEnable )
          && ( depthWriteEnable == rhs.depthWriteEnable )
          && ( depthCompareOp == rhs.depthCompareOp )
          && ( depthBoundsTestEnable == rhs.depthBoundsTestEnable )
          && ( stencilTestEnable == rhs.stencilTestEnable )
          && ( front == rhs.front )
          && ( back == rhs.back )
          && ( minDepthBounds == rhs.minDepthBounds )
          && ( maxDepthBounds == rhs.maxDepthBounds );
#endif
    }

    bool operator!=( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags = {};
    VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable = {};
    VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable = {};
    VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
    VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable = {};
    VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable = {};
    VULKAN_HPP_NAMESPACE::StencilOpState front = {};
    VULKAN_HPP_NAMESPACE::StencilOpState back = {};
    float minDepthBounds = {};
    float maxDepthBounds = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo>::value, "PipelineDepthStencilStateCreateInfo is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineDepthStencilStateCreateInfo>
  {
    using Type = PipelineDepthStencilStateCreateInfo;
  };
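
  //=== Usage sketch (editorial addition, not produced by the registry generator) ===
  // Chains the setters above into a conventional "depth test on, depth write on,
  // stencil off" state for opaque geometry. The helper name and the compare op are
  // illustrative choices, not defaults required by the specification.
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
  inline PipelineDepthStencilStateCreateInfo exampleOpaqueDepthStencilState() VULKAN_HPP_NOEXCEPT
  {
    return PipelineDepthStencilStateCreateInfo{}
      .setDepthTestEnable( VK_TRUE )
      .setDepthWriteEnable( VK_TRUE )
      .setDepthCompareOp( CompareOp::eLessOrEqual )
      .setDepthBoundsTestEnable( VK_FALSE )
      .setStencilTestEnable( VK_FALSE )
      .setMinDepthBounds( 0.0f )
      .setMaxDepthBounds( 1.0f );
  }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
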
  struct PipelineColorBlendAttachmentState
  {
    using NativeType = VkPipelineColorBlendAttachmentState;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState(VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ = {}, VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ = {}) VULKAN_HPP_NOEXCEPT
      : blendEnable( blendEnable_ ), srcColorBlendFactor( srcColorBlendFactor_ ), dstColorBlendFactor( dstColorBlendFactor_ ), colorBlendOp( colorBlendOp_ ), srcAlphaBlendFactor( srcAlphaBlendFactor_ ), dstAlphaBlendFactor( dstAlphaBlendFactor_ ), alphaBlendOp( alphaBlendOp_ ), colorWriteMask( colorWriteMask_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineColorBlendAttachmentState( *reinterpret_cast<PipelineColorBlendAttachmentState const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineColorBlendAttachmentState & operator=( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineColorBlendAttachmentState & operator=( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setBlendEnable( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      blendEnable = blendEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
    {
      srcColorBlendFactor = srcColorBlendFactor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
    {
      dstColorBlendFactor = dstColorBlendFactor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT
    {
      colorBlendOp = colorBlendOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAlphaBlendFactor = srcAlphaBlendFactor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAlphaBlendFactor = dstAlphaBlendFactor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT
    {
      alphaBlendOp = alphaBlendOp_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setColorWriteMask( VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ ) VULKAN_HPP_NOEXCEPT
    {
      colorWriteMask = colorWriteMask_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPipelineColorBlendAttachmentState const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineColorBlendAttachmentState*>( this );
    }

    explicit operator VkPipelineColorBlendAttachmentState &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineColorBlendAttachmentState*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::BlendFactor const &, VULKAN_HPP_NAMESPACE::BlendFactor const &, VULKAN_HPP_NAMESPACE::BlendOp const &, VULKAN_HPP_NAMESPACE::BlendFactor const &, VULKAN_HPP_NAMESPACE::BlendFactor const &, VULKAN_HPP_NAMESPACE::BlendOp const &, VULKAN_HPP_NAMESPACE::ColorComponentFlags const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( blendEnable, srcColorBlendFactor, dstColorBlendFactor, colorBlendOp, srcAlphaBlendFactor, dstAlphaBlendFactor, alphaBlendOp, colorWriteMask );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PipelineColorBlendAttachmentState const & ) const = default;
#else
    bool operator==( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( blendEnable == rhs.blendEnable )
          && ( srcColorBlendFactor == rhs.srcColorBlendFactor )
          && ( dstColorBlendFactor == rhs.dstColorBlendFactor )
          && ( colorBlendOp == rhs.colorBlendOp )
          && ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor )
          && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor )
          && ( alphaBlendOp == rhs.alphaBlendOp )
          && ( colorWriteMask == rhs.colorWriteMask );
#endif
    }

    bool operator!=( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::Bool32 blendEnable = {};
    VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
    VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
    VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
    VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
    VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
    VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
    VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState>::value, "PipelineColorBlendAttachmentState is not nothrow_move_constructible!" );
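
  //=== Usage sketch (editorial addition, not produced by the registry generator) ===
  // A classic "straight alpha" blend configuration for a single color attachment,
  // written with the setters above. The helper name and the factor/op choices are
  // illustrative; blending stays disabled if blendEnable is left at its default.
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
  inline PipelineColorBlendAttachmentState exampleAlphaBlendAttachment() VULKAN_HPP_NOEXCEPT
  {
    return PipelineColorBlendAttachmentState{}
      .setBlendEnable( VK_TRUE )
      .setSrcColorBlendFactor( BlendFactor::eSrcAlpha )
      .setDstColorBlendFactor( BlendFactor::eOneMinusSrcAlpha )
      .setColorBlendOp( BlendOp::eAdd )
      .setSrcAlphaBlendFactor( BlendFactor::eOne )
      .setDstAlphaBlendFactor( BlendFactor::eZero )
      .setAlphaBlendOp( BlendOp::eAdd )
      .setColorWriteMask( ColorComponentFlagBits::eR | ColorComponentFlagBits::eG | ColorComponentFlagBits::eB | ColorComponentFlagBits::eA );
  }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
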
struct PipelineColorBlendStateCreateInfo
|
|
{
|
|
using NativeType = VkPipelineColorBlendStateCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendStateCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ = {}, VULKAN_HPP_NAMESPACE::LogicOp logicOp_ = VULKAN_HPP_NAMESPACE::LogicOp::eClear, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ = {}, std::array<float,4> const & blendConstants_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), logicOpEnable( logicOpEnable_ ), logicOp( logicOp_ ), attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ ), blendConstants( blendConstants_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineColorBlendStateCreateInfo( *reinterpret_cast<PipelineColorBlendStateCreateInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineColorBlendStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_, VULKAN_HPP_NAMESPACE::LogicOp logicOp_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState> const & attachments_, std::array<float,4> const & blendConstants_ = {} )
|
|
: flags( flags_ ), logicOpEnable( logicOpEnable_ ), logicOp( logicOp_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() ), blendConstants( blendConstants_ )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineColorBlendStateCreateInfo & operator=( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineColorBlendStateCreateInfo & operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setLogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
logicOpEnable = logicOpEnable_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setLogicOp( VULKAN_HPP_NAMESPACE::LogicOp logicOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
logicOp = logicOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentCount = attachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAttachments = pAttachments_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineColorBlendStateCreateInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState> const & attachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentCount = static_cast<uint32_t>( attachments_.size() );
|
|
pAttachments = attachments_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setBlendConstants( std::array<float,4> blendConstants_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
blendConstants = blendConstants_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineColorBlendStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineColorBlendStateCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineColorBlendStateCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineColorBlendStateCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::LogicOp const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, logicOpEnable, logicOp, attachmentCount, pAttachments, blendConstants );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineColorBlendStateCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( logicOpEnable == rhs.logicOpEnable )
|
|
&& ( logicOp == rhs.logicOp )
|
|
&& ( attachmentCount == rhs.attachmentCount )
|
|
&& ( pAttachments == rhs.pAttachments )
|
|
&& ( blendConstants == rhs.blendConstants );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable = {};
|
|
VULKAN_HPP_NAMESPACE::LogicOp logicOp = VULKAN_HPP_NAMESPACE::LogicOp::eClear;
|
|
uint32_t attachmentCount = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> blendConstants = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo>::value, "PipelineColorBlendStateCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineColorBlendStateCreateInfo>
|
|
{
|
|
using Type = PipelineColorBlendStateCreateInfo;
|
|
};
|
|
|
|
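
  //=== Usage sketch (editorial addition, not produced by the registry generator) ===
  // Builds a PipelineColorBlendStateCreateInfo from caller-owned attachment states via
  // the ArrayProxy setter defined above. Only the pointer/count pair is stored, exactly
  // as in the raw Vulkan struct, so `attachments_` must outlive the returned value.
  // The helper name and the blend constants are illustrative placeholders.
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) && !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
  inline PipelineColorBlendStateCreateInfo exampleColorBlendState( ArrayProxyNoTemporaries<const PipelineColorBlendAttachmentState> const & attachments_ ) VULKAN_HPP_NOEXCEPT
  {
    return PipelineColorBlendStateCreateInfo{}
      .setLogicOpEnable( VK_FALSE )
      .setAttachments( attachments_ )
      .setBlendConstants( { { 1.0f, 1.0f, 1.0f, 1.0f } } );
  }
#endif
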
  struct PipelineDynamicStateCreateInfo
  {
    using NativeType = VkPipelineDynamicStateCreateInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDynamicStateCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ = {}, uint32_t dynamicStateCount_ = {}, const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ = {}) VULKAN_HPP_NOEXCEPT
      : flags( flags_ ), dynamicStateCount( dynamicStateCount_ ), pDynamicStates( pDynamicStates_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineDynamicStateCreateInfo( *reinterpret_cast<PipelineDynamicStateCreateInfo const *>( &rhs ) )
    {}

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineDynamicStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DynamicState> const & dynamicStates_ )
      : flags( flags_ ), dynamicStateCount( static_cast<uint32_t>( dynamicStates_.size() ) ), pDynamicStates( dynamicStates_.data() )
    {}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineDynamicStateCreateInfo & operator=( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineDynamicStateCreateInfo & operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setDynamicStateCount( uint32_t dynamicStateCount_ ) VULKAN_HPP_NOEXCEPT
    {
      dynamicStateCount = dynamicStateCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setPDynamicStates( const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ ) VULKAN_HPP_NOEXCEPT
    {
      pDynamicStates = pDynamicStates_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineDynamicStateCreateInfo & setDynamicStates( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DynamicState> const & dynamicStates_ ) VULKAN_HPP_NOEXCEPT
    {
      dynamicStateCount = static_cast<uint32_t>( dynamicStates_.size() );
      pDynamicStates = dynamicStates_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPipelineDynamicStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineDynamicStateCreateInfo*>( this );
    }

    explicit operator VkPipelineDynamicStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineDynamicStateCreateInfo*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DynamicState * const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, dynamicStateCount, pDynamicStates );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PipelineDynamicStateCreateInfo const & ) const = default;
#else
    bool operator==( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags )
          && ( dynamicStateCount == rhs.dynamicStateCount )
          && ( pDynamicStates == rhs.pDynamicStates );
#endif
    }

    bool operator!=( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDynamicStateCreateInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags = {};
    uint32_t dynamicStateCount = {};
    const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo>::value, "PipelineDynamicStateCreateInfo is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineDynamicStateCreateInfo>
  {
    using Type = PipelineDynamicStateCreateInfo;
  };
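
  //=== Usage sketch (editorial addition, not produced by the registry generator) ===
  // Marks viewport and scissor as dynamic so they are supplied at command-buffer
  // recording time instead of being baked into the pipeline. The dynamic-state array is
  // caller-owned and must outlive the returned create-info; the helper name is a
  // placeholder. For example:
  //   std::array<DynamicState, 2> dynamicStates = { DynamicState::eViewport, DynamicState::eScissor };
  //   auto dynamicInfo = exampleDynamicState( dynamicStates );
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) && !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
  inline PipelineDynamicStateCreateInfo exampleDynamicState( ArrayProxyNoTemporaries<const DynamicState> const & dynamicStates_ ) VULKAN_HPP_NOEXCEPT
  {
    return PipelineDynamicStateCreateInfo{}.setDynamicStates( dynamicStates_ );
  }
#endif
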
struct GraphicsPipelineCreateInfo
|
|
{
|
|
using NativeType = VkGraphicsPipelineCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), stageCount( stageCount_ ), pStages( pStages_ ), pVertexInputState( pVertexInputState_ ), pInputAssemblyState( pInputAssemblyState_ ), pTessellationState( pTessellationState_ ), pViewportState( pViewportState_ ), pRasterizationState( pRasterizationState_ ), pMultisampleState( pMultisampleState_ ), pDepthStencilState( pDepthStencilState_ ), pColorBlendState( pColorBlendState_ ), pDynamicState( pDynamicState_ ), layout( layout_ ), renderPass( renderPass_ ), subpass( subpass_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: GraphicsPipelineCreateInfo( *reinterpret_cast<GraphicsPipelineCreateInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
GraphicsPipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {} )
|
|
: flags( flags_ ), stageCount( static_cast<uint32_t>( stages_.size() ) ), pStages( stages_.data() ), pVertexInputState( pVertexInputState_ ), pInputAssemblyState( pInputAssemblyState_ ), pTessellationState( pTessellationState_ ), pViewportState( pViewportState_ ), pRasterizationState( pRasterizationState_ ), pMultisampleState( pMultisampleState_ ), pDepthStencilState( pDepthStencilState_ ), pColorBlendState( pColorBlendState_ ), pDynamicState( pDynamicState_ ), layout( layout_ ), renderPass( renderPass_ ), subpass( subpass_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
GraphicsPipelineCreateInfo & operator=( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
GraphicsPipelineCreateInfo & operator=( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stageCount = stageCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pStages = pStages_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
GraphicsPipelineCreateInfo & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stageCount = static_cast<uint32_t>( stages_.size() );
|
|
pStages = stages_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pVertexInputState = pVertexInputState_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPInputAssemblyState( const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pInputAssemblyState = pInputAssemblyState_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pTessellationState = pTessellationState_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPViewportState( const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pViewportState = pViewportState_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPRasterizationState( const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pRasterizationState = pRasterizationState_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPMultisampleState( const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pMultisampleState = pMultisampleState_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPDepthStencilState( const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDepthStencilState = pDepthStencilState_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPColorBlendState( const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pColorBlendState = pColorBlendState_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPDynamicState( const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDynamicState = pDynamicState_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
layout = layout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
renderPass = renderPass_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subpass = subpass_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
basePipelineHandle = basePipelineHandle_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
basePipelineIndex = basePipelineIndex_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkGraphicsPipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkGraphicsPipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkGraphicsPipelineCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * const &, VULKAN_HPP_NAMESPACE::PipelineLayout const &, VULKAN_HPP_NAMESPACE::RenderPass const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Pipeline const &, int32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, stageCount, pStages, pVertexInputState, pInputAssemblyState, pTessellationState, pViewportState, pRasterizationState, pMultisampleState, pDepthStencilState, pColorBlendState, pDynamicState, layout, renderPass, subpass, basePipelineHandle, basePipelineIndex );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( GraphicsPipelineCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( stageCount == rhs.stageCount )
|
|
&& ( pStages == rhs.pStages )
|
|
&& ( pVertexInputState == rhs.pVertexInputState )
|
|
&& ( pInputAssemblyState == rhs.pInputAssemblyState )
|
|
&& ( pTessellationState == rhs.pTessellationState )
|
|
&& ( pViewportState == rhs.pViewportState )
|
|
&& ( pRasterizationState == rhs.pRasterizationState )
|
|
&& ( pMultisampleState == rhs.pMultisampleState )
|
|
&& ( pDepthStencilState == rhs.pDepthStencilState )
|
|
&& ( pColorBlendState == rhs.pColorBlendState )
|
|
&& ( pDynamicState == rhs.pDynamicState )
|
|
&& ( layout == rhs.layout )
|
|
&& ( renderPass == rhs.renderPass )
|
|
&& ( subpass == rhs.subpass )
|
|
&& ( basePipelineHandle == rhs.basePipelineHandle )
|
|
&& ( basePipelineIndex == rhs.basePipelineIndex );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
|
|
uint32_t stageCount = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
|
|
VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
|
|
uint32_t subpass = {};
|
|
VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
|
|
int32_t basePipelineIndex = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo>::value, "GraphicsPipelineCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eGraphicsPipelineCreateInfo>
|
|
{
|
|
using Type = GraphicsPipelineCreateInfo;
|
|
};
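
  //=== Usage sketch (editorial addition, not produced by the registry generator) ===
  // Wires previously built, caller-owned state blocks into a single
  // GraphicsPipelineCreateInfo; every pointer and the stage array must stay alive until
  // pipeline creation (e.g. Device::createGraphicsPipeline) has returned. The helper
  // name, parameter set and defaults are illustrative only.
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) && !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
  inline GraphicsPipelineCreateInfo exampleGraphicsPipelineCreateInfo(
    ArrayProxyNoTemporaries<const PipelineShaderStageCreateInfo> const & stages_,
    const PipelineVertexInputStateCreateInfo *                           pVertexInputState_,
    const PipelineInputAssemblyStateCreateInfo *                         pInputAssemblyState_,
    const PipelineViewportStateCreateInfo *                              pViewportState_,
    const PipelineRasterizationStateCreateInfo *                         pRasterizationState_,
    const PipelineMultisampleStateCreateInfo *                           pMultisampleState_,
    const PipelineDepthStencilStateCreateInfo *                          pDepthStencilState_,
    const PipelineColorBlendStateCreateInfo *                            pColorBlendState_,
    const PipelineDynamicStateCreateInfo *                               pDynamicState_,
    PipelineLayout                                                       layout_,
    RenderPass                                                           renderPass_,
    uint32_t                                                             subpass_ = 0 ) VULKAN_HPP_NOEXCEPT
  {
    return GraphicsPipelineCreateInfo{}
      .setStages( stages_ )
      .setPVertexInputState( pVertexInputState_ )
      .setPInputAssemblyState( pInputAssemblyState_ )
      .setPViewportState( pViewportState_ )
      .setPRasterizationState( pRasterizationState_ )
      .setPMultisampleState( pMultisampleState_ )
      .setPDepthStencilState( pDepthStencilState_ )
      .setPColorBlendState( pColorBlendState_ )
      .setPDynamicState( pDynamicState_ )
      .setLayout( layout_ )
      .setRenderPass( renderPass_ )
      .setSubpass( subpass_ );
  }
#endif
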
|
|
|
|
struct GraphicsShaderGroupCreateInfoNV
|
|
{
|
|
using NativeType = VkGraphicsShaderGroupCreateInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsShaderGroupCreateInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV(uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: stageCount( stageCount_ ), pStages( pStages_ ), pVertexInputState( pVertexInputState_ ), pTessellationState( pTessellationState_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
GraphicsShaderGroupCreateInfoNV( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: GraphicsShaderGroupCreateInfoNV( *reinterpret_cast<GraphicsShaderGroupCreateInfoNV const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
GraphicsShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {} )
|
|
: stageCount( static_cast<uint32_t>( stages_.size() ) ), pStages( stages_.data() ), pVertexInputState( pVertexInputState_ ), pTessellationState( pTessellationState_ )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
GraphicsShaderGroupCreateInfoNV & operator=( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
GraphicsShaderGroupCreateInfoNV & operator=( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stageCount = stageCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pStages = pStages_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
GraphicsShaderGroupCreateInfoNV & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stageCount = static_cast<uint32_t>( stages_.size() );
|
|
pStages = stages_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pVertexInputState = pVertexInputState_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pTessellationState = pTessellationState_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkGraphicsShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkGraphicsShaderGroupCreateInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkGraphicsShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkGraphicsShaderGroupCreateInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * const &, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, stageCount, pStages, pVertexInputState, pTessellationState );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( GraphicsShaderGroupCreateInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( stageCount == rhs.stageCount )
|
|
&& ( pStages == rhs.pStages )
|
|
&& ( pVertexInputState == rhs.pVertexInputState )
|
|
&& ( pTessellationState == rhs.pTessellationState );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsShaderGroupCreateInfoNV;
|
|
const void * pNext = {};
|
|
uint32_t stageCount = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV ) == sizeof( VkGraphicsShaderGroupCreateInfoNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV>::value, "GraphicsShaderGroupCreateInfoNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eGraphicsShaderGroupCreateInfoNV>
|
|
{
|
|
using Type = GraphicsShaderGroupCreateInfoNV;
|
|
};
|
|
|
|
struct GraphicsPipelineShaderGroupsCreateInfoNV
|
|
{
|
|
using NativeType = VkGraphicsPipelineShaderGroupsCreateInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV(uint32_t groupCount_ = {}, const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_ = {}, uint32_t pipelineCount_ = {}, const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: groupCount( groupCount_ ), pGroups( pGroups_ ), pipelineCount( pipelineCount_ ), pPipelines( pPipelines_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
GraphicsPipelineShaderGroupsCreateInfoNV( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: GraphicsPipelineShaderGroupsCreateInfoNV( *reinterpret_cast<GraphicsPipelineShaderGroupsCreateInfoNV const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
GraphicsPipelineShaderGroupsCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV> const & groups_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & pipelines_ = {} )
|
|
: groupCount( static_cast<uint32_t>( groups_.size() ) ), pGroups( groups_.data() ), pipelineCount( static_cast<uint32_t>( pipelines_.size() ) ), pPipelines( pipelines_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
GraphicsPipelineShaderGroupsCreateInfoNV & operator=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
GraphicsPipelineShaderGroupsCreateInfoNV & operator=( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
groupCount = groupCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPGroups( const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pGroups = pGroups_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
GraphicsPipelineShaderGroupsCreateInfoNV & setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV> const & groups_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
groupCount = static_cast<uint32_t>( groups_.size() );
|
|
pGroups = groups_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPipelineCount( uint32_t pipelineCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineCount = pipelineCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPPipelines( const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pPipelines = pPipelines_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
GraphicsPipelineShaderGroupsCreateInfoNV & setPipelines( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & pipelines_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineCount = static_cast<uint32_t>( pipelines_.size() );
|
|
pPipelines = pipelines_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkGraphicsPipelineShaderGroupsCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkGraphicsPipelineShaderGroupsCreateInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkGraphicsPipelineShaderGroupsCreateInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkGraphicsPipelineShaderGroupsCreateInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Pipeline * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, groupCount, pGroups, pipelineCount, pPipelines );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( GraphicsPipelineShaderGroupsCreateInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( groupCount == rhs.groupCount )
|
|
&& ( pGroups == rhs.pGroups )
|
|
&& ( pipelineCount == rhs.pipelineCount )
|
|
&& ( pPipelines == rhs.pPipelines );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV;
|
|
const void * pNext = {};
|
|
uint32_t groupCount = {};
|
|
const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups = {};
|
|
uint32_t pipelineCount = {};
|
|
const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV ) == sizeof( VkGraphicsPipelineShaderGroupsCreateInfoNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV>::value, "GraphicsPipelineShaderGroupsCreateInfoNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV>
|
|
{
|
|
using Type = GraphicsPipelineShaderGroupsCreateInfoNV;
|
|
};
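
  //=== Usage sketch (editorial addition, not produced by the registry generator) ===
  // With VK_NV_device_generated_commands, the shader-group description is chained into a
  // GraphicsPipelineCreateInfo through its pNext pointer, e.g.
  //   auto groupsInfo = exampleShaderGroupsInfoNV( groups );
  //   graphicsPipelineCreateInfo.setPNext( &groupsInfo );
  // `groups_` is caller-owned and must outlive pipeline creation; the helper name is a
  // placeholder.
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) && !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
  inline GraphicsPipelineShaderGroupsCreateInfoNV exampleShaderGroupsInfoNV( ArrayProxyNoTemporaries<const GraphicsShaderGroupCreateInfoNV> const & groups_ ) VULKAN_HPP_NOEXCEPT
  {
    return GraphicsPipelineShaderGroupsCreateInfoNV{}.setGroups( groups_ );
  }
#endif
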
|
|
|
|
  struct XYColorEXT
  {
    using NativeType = VkXYColorEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR XYColorEXT(float x_ = {}, float y_ = {}) VULKAN_HPP_NOEXCEPT
      : x( x_ ), y( y_ )
    {}

    VULKAN_HPP_CONSTEXPR XYColorEXT( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    XYColorEXT( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : XYColorEXT( *reinterpret_cast<XYColorEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    XYColorEXT & operator=( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    XYColorEXT & operator=( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XYColorEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 XYColorEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT
    {
      x = x_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 XYColorEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT
    {
      y = y_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkXYColorEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkXYColorEXT*>( this );
    }

    explicit operator VkXYColorEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkXYColorEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<float const &, float const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( x, y );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( XYColorEXT const & ) const = default;
#else
    bool operator==( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( x == rhs.x )
          && ( y == rhs.y );
#endif
    }

    bool operator!=( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    float x = {};
    float y = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::XYColorEXT ) == sizeof( VkXYColorEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::XYColorEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::XYColorEXT>::value, "XYColorEXT is not nothrow_move_constructible!" );
struct HdrMetadataEXT
  {
    using NativeType = VkHdrMetadataEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHdrMetadataEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR HdrMetadataEXT(VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ = {}, VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ = {}, VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ = {}, VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ = {}, float maxLuminance_ = {}, float minLuminance_ = {}, float maxContentLightLevel_ = {}, float maxFrameAverageLightLevel_ = {}) VULKAN_HPP_NOEXCEPT
      : displayPrimaryRed( displayPrimaryRed_ ), displayPrimaryGreen( displayPrimaryGreen_ ), displayPrimaryBlue( displayPrimaryBlue_ ), whitePoint( whitePoint_ ), maxLuminance( maxLuminance_ ), minLuminance( minLuminance_ ), maxContentLightLevel( maxContentLightLevel_ ), maxFrameAverageLightLevel( maxFrameAverageLightLevel_ )
    {}

    VULKAN_HPP_CONSTEXPR HdrMetadataEXT( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : HdrMetadataEXT( *reinterpret_cast<HdrMetadataEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    HdrMetadataEXT & operator=( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    HdrMetadataEXT & operator=( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HdrMetadataEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryRed( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT
    {
      displayPrimaryRed = displayPrimaryRed_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryGreen( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT
    {
      displayPrimaryGreen = displayPrimaryGreen_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryBlue( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT
    {
      displayPrimaryBlue = displayPrimaryBlue_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setWhitePoint( VULKAN_HPP_NAMESPACE::XYColorEXT const & whitePoint_ ) VULKAN_HPP_NOEXCEPT
    {
      whitePoint = whitePoint_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxLuminance( float maxLuminance_ ) VULKAN_HPP_NOEXCEPT
    {
      maxLuminance = maxLuminance_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMinLuminance( float minLuminance_ ) VULKAN_HPP_NOEXCEPT
    {
      minLuminance = minLuminance_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxContentLightLevel( float maxContentLightLevel_ ) VULKAN_HPP_NOEXCEPT
    {
      maxContentLightLevel = maxContentLightLevel_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ ) VULKAN_HPP_NOEXCEPT
    {
      maxFrameAverageLightLevel = maxFrameAverageLightLevel_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkHdrMetadataEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkHdrMetadataEXT*>( this );
    }

    explicit operator VkHdrMetadataEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkHdrMetadataEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::XYColorEXT const &, VULKAN_HPP_NAMESPACE::XYColorEXT const &, VULKAN_HPP_NAMESPACE::XYColorEXT const &, VULKAN_HPP_NAMESPACE::XYColorEXT const &, float const &, float const &, float const &, float const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, displayPrimaryRed, displayPrimaryGreen, displayPrimaryBlue, whitePoint, maxLuminance, minLuminance, maxContentLightLevel, maxFrameAverageLightLevel );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( HdrMetadataEXT const & ) const = default;
#else
    bool operator==( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( displayPrimaryRed == rhs.displayPrimaryRed )
          && ( displayPrimaryGreen == rhs.displayPrimaryGreen )
          && ( displayPrimaryBlue == rhs.displayPrimaryBlue )
          && ( whitePoint == rhs.whitePoint )
          && ( maxLuminance == rhs.maxLuminance )
          && ( minLuminance == rhs.minLuminance )
          && ( maxContentLightLevel == rhs.maxContentLightLevel )
          && ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel );
#endif
    }

    bool operator!=( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrMetadataEXT;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed = {};
    VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen = {};
    VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue = {};
    VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint = {};
    float maxLuminance = {};
    float minLuminance = {};
    float maxContentLightLevel = {};
    float maxFrameAverageLightLevel = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::HdrMetadataEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::HdrMetadataEXT>::value, "HdrMetadataEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eHdrMetadataEXT>
  {
    using Type = HdrMetadataEXT;
  };

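  // Usage sketch (illustrative only, VK_EXT_hdr_metadata): the fluent setters above are typically used
  // to fill the structure before a call to vk::Device::setHdrMetadataEXT. `device` and `swapchain` are
  // assumed handles, and the BT.2020 primaries / luminance values below are example numbers only.
  //
  //   vk::HdrMetadataEXT metadata;
  //   metadata.setDisplayPrimaryRed( { 0.708f, 0.292f } )
  //           .setDisplayPrimaryGreen( { 0.170f, 0.797f } )
  //           .setDisplayPrimaryBlue( { 0.131f, 0.046f } )
  //           .setWhitePoint( { 0.3127f, 0.3290f } )
  //           .setMaxLuminance( 1000.0f )
  //           .setMinLuminance( 0.001f )
  //           .setMaxContentLightLevel( 1000.0f )
  //           .setMaxFrameAverageLightLevel( 400.0f );
  //   device.setHdrMetadataEXT( swapchain, metadata );
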
struct HeadlessSurfaceCreateInfoEXT
  {
    using NativeType = VkHeadlessSurfaceCreateInfoEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHeadlessSurfaceCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT(VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = {}) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
    {}

    VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : HeadlessSurfaceCreateInfoEXT( *reinterpret_cast<HeadlessSurfaceCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    HeadlessSurfaceCreateInfoEXT & operator=( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    HeadlessSurfaceCreateInfoEXT & operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkHeadlessSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT*>( this );
    }

    explicit operator VkHeadlessSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkHeadlessSurfaceCreateInfoEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( HeadlessSurfaceCreateInfoEXT const & ) const = default;
#else
    bool operator==( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags );
#endif
    }

    bool operator!=( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHeadlessSurfaceCreateInfoEXT;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT ) == sizeof( VkHeadlessSurfaceCreateInfoEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT>::value, "HeadlessSurfaceCreateInfoEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eHeadlessSurfaceCreateInfoEXT>
  {
    using Type = HeadlessSurfaceCreateInfoEXT;
  };

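  // Usage sketch (illustrative only): assumes a vk::Instance `instance` created with the
  // VK_EXT_headless_surface extension enabled and the default, exception-throwing configuration.
  // The create flags are reserved for future use, so a default-constructed structure suffices.
  //
  //   vk::HeadlessSurfaceCreateInfoEXT headlessCreateInfo{};
  //   vk::SurfaceKHR surface = instance.createHeadlessSurfaceEXT( headlessCreateInfo );
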
#if defined( VK_USE_PLATFORM_IOS_MVK )
|
|
struct IOSSurfaceCreateInfoMVK
|
|
{
|
|
using NativeType = VkIOSSurfaceCreateInfoMVK;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIosSurfaceCreateInfoMVK;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK(VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ = {}, const void * pView_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), pView( pView_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: IOSSurfaceCreateInfoMVK( *reinterpret_cast<IOSSurfaceCreateInfoMVK const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
IOSSurfaceCreateInfoMVK & operator=( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
IOSSurfaceCreateInfoMVK & operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pView = pView_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkIOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( this );
|
|
}
|
|
|
|
explicit operator VkIOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkIOSSurfaceCreateInfoMVK*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK const &, const void * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, pView );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( IOSSurfaceCreateInfoMVK const & ) const = default;
|
|
#else
|
|
bool operator==( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( pView == rhs.pView );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIosSurfaceCreateInfoMVK;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags = {};
|
|
const void * pView = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK>::value, "IOSSurfaceCreateInfoMVK is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eIosSurfaceCreateInfoMVK>
|
|
{
|
|
using Type = IOSSurfaceCreateInfoMVK;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_IOS_MVK*/
|
|
|
|
struct ImageBlit
|
|
{
|
|
using NativeType = VkImageBlit;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageBlit(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & srcOffsets_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & dstOffsets_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: srcSubresource( srcSubresource_ ), srcOffsets( srcOffsets_ ), dstSubresource( dstSubresource_ ), dstOffsets( dstOffsets_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageBlit( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageBlit( *reinterpret_cast<ImageBlit const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageBlit & operator=( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageBlit & operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageBlit const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageBlit & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcSubresource = srcSubresource_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageBlit & setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcOffsets = srcOffsets_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageBlit & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstSubresource = dstSubresource_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageBlit & setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstOffsets = dstOffsets_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImageBlit const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageBlit*>( this );
|
|
}
|
|
|
|
explicit operator VkImageBlit &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageBlit*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( srcSubresource, srcOffsets, dstSubresource, dstOffsets );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageBlit const & ) const = default;
|
|
#else
|
|
bool operator==( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( srcSubresource == rhs.srcSubresource )
|
|
&& ( srcOffsets == rhs.srcOffsets )
|
|
&& ( dstSubresource == rhs.dstSubresource )
|
|
&& ( dstOffsets == rhs.dstOffsets );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> srcOffsets = {};
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> dstOffsets = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageBlit>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageBlit>::value, "ImageBlit is not nothrow_move_constructible!" );
|
|
|
|
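  // Usage sketch (illustrative only): a single-region color blit that downsamples mip 0 into mip 1 of
  // the same 1024x1024 image, the usual mip-generation pattern. `cmd` is assumed to be a recording
  // vk::CommandBuffer, with mip 0 already in eTransferSrcOptimal and mip 1 in eTransferDstOptimal.
  //
  //   vk::ImageSubresourceLayers srcLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 );
  //   vk::ImageSubresourceLayers dstLayers( vk::ImageAspectFlagBits::eColor, 1, 0, 1 );
  //   vk::ImageBlit region( srcLayers, { { { 0, 0, 0 }, { 1024, 1024, 1 } } },
  //                         dstLayers, { { { 0, 0, 0 }, { 512, 512, 1 } } } );
  //   cmd.blitImage( image, vk::ImageLayout::eTransferSrcOptimal,
  //                  image, vk::ImageLayout::eTransferDstOptimal,
  //                  region, vk::Filter::eLinear );
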
#if defined( VK_USE_PLATFORM_FUCHSIA )
|
|
struct ImageFormatConstraintsInfoFUCHSIA
|
|
{
|
|
using NativeType = VkImageFormatConstraintsInfoFUCHSIA;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatConstraintsInfoFUCHSIA;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageFormatConstraintsInfoFUCHSIA(VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ = {}, VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_ = {}, uint64_t sysmemPixelFormat_ = {}, uint32_t colorSpaceCount_ = {}, const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: imageCreateInfo( imageCreateInfo_ ), requiredFormatFeatures( requiredFormatFeatures_ ), flags( flags_ ), sysmemPixelFormat( sysmemPixelFormat_ ), colorSpaceCount( colorSpaceCount_ ), pColorSpaces( pColorSpaces_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageFormatConstraintsInfoFUCHSIA( ImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageFormatConstraintsInfoFUCHSIA( VkImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageFormatConstraintsInfoFUCHSIA( *reinterpret_cast<ImageFormatConstraintsInfoFUCHSIA const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ImageFormatConstraintsInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo_, VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_, VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_, uint64_t sysmemPixelFormat_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA> const & colorSpaces_ )
|
|
: imageCreateInfo( imageCreateInfo_ ), requiredFormatFeatures( requiredFormatFeatures_ ), flags( flags_ ), sysmemPixelFormat( sysmemPixelFormat_ ), colorSpaceCount( static_cast<uint32_t>( colorSpaces_.size() ) ), pColorSpaces( colorSpaces_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageFormatConstraintsInfoFUCHSIA & operator=( ImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageFormatConstraintsInfoFUCHSIA & operator=( VkImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & imageCreateInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageCreateInfo = imageCreateInfo_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setRequiredFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
requiredFormatFeatures = requiredFormatFeatures_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setSysmemPixelFormat( uint64_t sysmemPixelFormat_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sysmemPixelFormat = sysmemPixelFormat_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setColorSpaceCount( uint32_t colorSpaceCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorSpaceCount = colorSpaceCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setPColorSpaces( const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pColorSpaces = pColorSpaces_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ImageFormatConstraintsInfoFUCHSIA & setColorSpaces( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA> const & colorSpaces_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorSpaceCount = static_cast<uint32_t>( colorSpaces_.size() );
|
|
pColorSpaces = colorSpaces_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImageFormatConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageFormatConstraintsInfoFUCHSIA*>( this );
|
|
}
|
|
|
|
explicit operator VkImageFormatConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageFormatConstraintsInfoFUCHSIA*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageCreateInfo const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &, VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA const &, uint64_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, imageCreateInfo, requiredFormatFeatures, flags, sysmemPixelFormat, colorSpaceCount, pColorSpaces );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageFormatConstraintsInfoFUCHSIA const & ) const = default;
|
|
#else
|
|
bool operator==( ImageFormatConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( imageCreateInfo == rhs.imageCreateInfo )
|
|
&& ( requiredFormatFeatures == rhs.requiredFormatFeatures )
|
|
&& ( flags == rhs.flags )
|
|
&& ( sysmemPixelFormat == rhs.sysmemPixelFormat )
|
|
&& ( colorSpaceCount == rhs.colorSpaceCount )
|
|
&& ( pColorSpaces == rhs.pColorSpaces );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageFormatConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatConstraintsInfoFUCHSIA;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo = {};
|
|
VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures = {};
|
|
VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags = {};
|
|
uint64_t sysmemPixelFormat = {};
|
|
uint32_t colorSpaceCount = {};
|
|
const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA ) == sizeof( VkImageFormatConstraintsInfoFUCHSIA ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA>::value, "ImageFormatConstraintsInfoFUCHSIA is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageFormatConstraintsInfoFUCHSIA>
|
|
{
|
|
using Type = ImageFormatConstraintsInfoFUCHSIA;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_FUCHSIA*/
|
|
|
|
#if defined( VK_USE_PLATFORM_FUCHSIA )
|
|
struct ImageConstraintsInfoFUCHSIA
|
|
{
|
|
using NativeType = VkImageConstraintsInfoFUCHSIA;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageConstraintsInfoFUCHSIA;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFUCHSIA(uint32_t formatConstraintsCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints_ = {}, VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: formatConstraintsCount( formatConstraintsCount_ ), pFormatConstraints( pFormatConstraints_ ), bufferCollectionConstraints( bufferCollectionConstraints_ ), flags( flags_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFUCHSIA( ImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageConstraintsInfoFUCHSIA( VkImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageConstraintsInfoFUCHSIA( *reinterpret_cast<ImageConstraintsInfoFUCHSIA const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ImageConstraintsInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA> const & formatConstraints_, VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ = {} )
|
|
: formatConstraintsCount( static_cast<uint32_t>( formatConstraints_.size() ) ), pFormatConstraints( formatConstraints_.data() ), bufferCollectionConstraints( bufferCollectionConstraints_ ), flags( flags_ )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageConstraintsInfoFUCHSIA & operator=( ImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageConstraintsInfoFUCHSIA & operator=( VkImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setFormatConstraintsCount( uint32_t formatConstraintsCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
formatConstraintsCount = formatConstraintsCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setPFormatConstraints( const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pFormatConstraints = pFormatConstraints_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ImageConstraintsInfoFUCHSIA & setFormatConstraints( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA> const & formatConstraints_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
formatConstraintsCount = static_cast<uint32_t>( formatConstraints_.size() );
|
|
pFormatConstraints = formatConstraints_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setBufferCollectionConstraints( VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraints_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bufferCollectionConstraints = bufferCollectionConstraints_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImageConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageConstraintsInfoFUCHSIA*>( this );
|
|
}
|
|
|
|
explicit operator VkImageConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageConstraintsInfoFUCHSIA*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * const &, VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const &, VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, formatConstraintsCount, pFormatConstraints, bufferCollectionConstraints, flags );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageConstraintsInfoFUCHSIA const & ) const = default;
|
|
#else
|
|
bool operator==( ImageConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( formatConstraintsCount == rhs.formatConstraintsCount )
|
|
&& ( pFormatConstraints == rhs.pFormatConstraints )
|
|
&& ( bufferCollectionConstraints == rhs.bufferCollectionConstraints )
|
|
&& ( flags == rhs.flags );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageConstraintsInfoFUCHSIA;
|
|
const void * pNext = {};
|
|
uint32_t formatConstraintsCount = {};
|
|
const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints = {};
|
|
VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints = {};
|
|
VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA ) == sizeof( VkImageConstraintsInfoFUCHSIA ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA>::value, "ImageConstraintsInfoFUCHSIA is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageConstraintsInfoFUCHSIA>
|
|
{
|
|
using Type = ImageConstraintsInfoFUCHSIA;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_FUCHSIA*/
|
|
|
|
struct ImageCopy
|
|
{
|
|
using NativeType = VkImageCopy;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageCopy(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageCopy( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageCopy( *reinterpret_cast<ImageCopy const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageCopy & operator=( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageCopy & operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCopy & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcSubresource = srcSubresource_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcOffset = srcOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCopy & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstSubresource = dstSubresource_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCopy & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstOffset = dstOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageCopy & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
extent = extent_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImageCopy const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageCopy*>( this );
|
|
}
|
|
|
|
explicit operator VkImageCopy &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageCopy*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( srcSubresource, srcOffset, dstSubresource, dstOffset, extent );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageCopy const & ) const = default;
|
|
#else
|
|
bool operator==( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( srcSubresource == rhs.srcSubresource )
|
|
&& ( srcOffset == rhs.srcOffset )
|
|
&& ( dstSubresource == rhs.dstSubresource )
|
|
&& ( dstOffset == rhs.dstOffset )
|
|
&& ( extent == rhs.extent );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
|
|
VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
|
|
VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
|
|
VULKAN_HPP_NAMESPACE::Extent3D extent = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCopy>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCopy>::value, "ImageCopy is not nothrow_move_constructible!" );
|
|
|
|
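  // Usage sketch (illustrative only): a full-size copy of the base mip level between two 1024x1024
  // color images. `cmd` is assumed to be a recording vk::CommandBuffer, with `srcImage` in
  // eTransferSrcOptimal and `dstImage` in eTransferDstOptimal layout.
  //
  //   vk::ImageSubresourceLayers layers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 );
  //   vk::ImageCopy region( layers, { 0, 0, 0 }, layers, { 0, 0, 0 }, { 1024, 1024, 1 } );
  //   cmd.copyImage( srcImage, vk::ImageLayout::eTransferSrcOptimal,
  //                  dstImage, vk::ImageLayout::eTransferDstOptimal, region );
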
struct SubresourceLayout
|
|
{
|
|
using NativeType = VkSubresourceLayout;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SubresourceLayout(VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: offset( offset_ ), size( size_ ), rowPitch( rowPitch_ ), arrayPitch( arrayPitch_ ), depthPitch( depthPitch_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SubresourceLayout( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubresourceLayout( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SubresourceLayout( *reinterpret_cast<SubresourceLayout const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SubresourceLayout & operator=( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubresourceLayout & operator=( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubresourceLayout const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkSubresourceLayout const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSubresourceLayout*>( this );
|
|
}
|
|
|
|
explicit operator VkSubresourceLayout &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSubresourceLayout*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( offset, size, rowPitch, arrayPitch, depthPitch );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SubresourceLayout const & ) const = default;
|
|
#else
|
|
bool operator==( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( offset == rhs.offset )
|
|
&& ( size == rhs.size )
|
|
&& ( rowPitch == rhs.rowPitch )
|
|
&& ( arrayPitch == rhs.arrayPitch )
|
|
&& ( depthPitch == rhs.depthPitch );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize rowPitch = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize depthPitch = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceLayout ) == sizeof( VkSubresourceLayout ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubresourceLayout>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubresourceLayout>::value, "SubresourceLayout is not nothrow_move_constructible!" );
|
|
|
|
struct ImageDrmFormatModifierExplicitCreateInfoEXT
|
|
{
|
|
using NativeType = VkImageDrmFormatModifierExplicitCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT(uint64_t drmFormatModifier_ = {}, uint32_t drmFormatModifierPlaneCount_ = {}, const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: drmFormatModifier( drmFormatModifier_ ), drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ), pPlaneLayouts( pPlaneLayouts_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageDrmFormatModifierExplicitCreateInfoEXT( *reinterpret_cast<ImageDrmFormatModifierExplicitCreateInfoEXT const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubresourceLayout> const & planeLayouts_ )
|
|
: drmFormatModifier( drmFormatModifier_ ), drmFormatModifierPlaneCount( static_cast<uint32_t>( planeLayouts_.size() ) ), pPlaneLayouts( planeLayouts_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
drmFormatModifier = drmFormatModifier_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setPPlaneLayouts( const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pPlaneLayouts = pPlaneLayouts_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ImageDrmFormatModifierExplicitCreateInfoEXT & setPlaneLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubresourceLayout> const & planeLayouts_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
drmFormatModifierPlaneCount = static_cast<uint32_t>( planeLayouts_.size() );
|
|
pPlaneLayouts = planeLayouts_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImageDrmFormatModifierExplicitCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageDrmFormatModifierExplicitCreateInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageDrmFormatModifierExplicitCreateInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SubresourceLayout * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, drmFormatModifier, drmFormatModifierPlaneCount, pPlaneLayouts );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageDrmFormatModifierExplicitCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( drmFormatModifier == rhs.drmFormatModifier )
|
|
&& ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount )
|
|
&& ( pPlaneLayouts == rhs.pPlaneLayouts );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
|
|
const void * pNext = {};
|
|
uint64_t drmFormatModifier = {};
|
|
uint32_t drmFormatModifierPlaneCount = {};
|
|
const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT>::value, "ImageDrmFormatModifierExplicitCreateInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT>
|
|
{
|
|
using Type = ImageDrmFormatModifierExplicitCreateInfoEXT;
|
|
};
|
|
|
|
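  // Usage sketch (illustrative only, VK_EXT_image_drm_format_modifier): importing an image whose DRM
  // format modifier and per-plane layouts were negotiated externally (for example by another API).
  // `planeLayouts`, `drmFormatModifier` and `imageCreateInfo` are assumed placeholders; the image must
  // use vk::ImageTiling::eDrmFormatModifierEXT.
  //
  //   std::vector<vk::SubresourceLayout> planeLayouts = /* one entry per memory plane */;
  //   vk::ImageDrmFormatModifierExplicitCreateInfoEXT explicitInfo( drmFormatModifier, planeLayouts );
  //   imageCreateInfo.setTiling( vk::ImageTiling::eDrmFormatModifierEXT ).setPNext( &explicitInfo );
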
struct ImageDrmFormatModifierListCreateInfoEXT
|
|
{
|
|
using NativeType = VkImageDrmFormatModifierListCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierListCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT(uint32_t drmFormatModifierCount_ = {}, const uint64_t * pDrmFormatModifiers_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: drmFormatModifierCount( drmFormatModifierCount_ ), pDrmFormatModifiers( pDrmFormatModifiers_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageDrmFormatModifierListCreateInfoEXT( *reinterpret_cast<ImageDrmFormatModifierListCreateInfoEXT const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ImageDrmFormatModifierListCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & drmFormatModifiers_ )
|
|
: drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifiers_.size() ) ), pDrmFormatModifiers( drmFormatModifiers_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageDrmFormatModifierListCreateInfoEXT & operator=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageDrmFormatModifierListCreateInfoEXT & operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
drmFormatModifierCount = drmFormatModifierCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setPDrmFormatModifiers( const uint64_t * pDrmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDrmFormatModifiers = pDrmFormatModifiers_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifiers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & drmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
drmFormatModifierCount = static_cast<uint32_t>( drmFormatModifiers_.size() );
|
|
pDrmFormatModifiers = drmFormatModifiers_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImageDrmFormatModifierListCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageDrmFormatModifierListCreateInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkImageDrmFormatModifierListCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageDrmFormatModifierListCreateInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint64_t * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifiers );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageDrmFormatModifierListCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( drmFormatModifierCount == rhs.drmFormatModifierCount )
|
|
&& ( pDrmFormatModifiers == rhs.pDrmFormatModifiers );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT;
|
|
const void * pNext = {};
|
|
uint32_t drmFormatModifierCount = {};
|
|
const uint64_t * pDrmFormatModifiers = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierListCreateInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT>::value, "ImageDrmFormatModifierListCreateInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageDrmFormatModifierListCreateInfoEXT>
|
|
{
|
|
using Type = ImageDrmFormatModifierListCreateInfoEXT;
|
|
};
|
|
|
|
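  // Usage sketch (illustrative only, VK_EXT_image_drm_format_modifier): letting the implementation
  // choose one modifier out of a list of acceptable ones (for example, modifiers previously queried
  // through vk::DrmFormatModifierPropertiesListEXT). `modifiers` and `imageCreateInfo` are placeholders.
  //
  //   std::vector<uint64_t> modifiers = /* acceptable DRM format modifiers */;
  //   vk::ImageDrmFormatModifierListCreateInfoEXT modifierList( modifiers );
  //   imageCreateInfo.setTiling( vk::ImageTiling::eDrmFormatModifierEXT ).setPNext( &modifierList );
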
struct ImageDrmFormatModifierPropertiesEXT
|
|
{
|
|
using NativeType = VkImageDrmFormatModifierPropertiesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierPropertiesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT(uint64_t drmFormatModifier_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: drmFormatModifier( drmFormatModifier_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageDrmFormatModifierPropertiesEXT( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageDrmFormatModifierPropertiesEXT( *reinterpret_cast<ImageDrmFormatModifierPropertiesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageDrmFormatModifierPropertiesEXT & operator=( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageDrmFormatModifierPropertiesEXT & operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkImageDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageDrmFormatModifierPropertiesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkImageDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint64_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, drmFormatModifier );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageDrmFormatModifierPropertiesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( drmFormatModifier == rhs.drmFormatModifier );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT;
|
|
void * pNext = {};
|
|
uint64_t drmFormatModifier = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT ) == sizeof( VkImageDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::value, "ImageDrmFormatModifierPropertiesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageDrmFormatModifierPropertiesEXT>
|
|
{
|
|
using Type = ImageDrmFormatModifierPropertiesEXT;
|
|
};
|
|
|
|
struct ImageFormatListCreateInfo
|
|
{
|
|
using NativeType = VkImageFormatListCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatListCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo(uint32_t viewFormatCount_ = {}, const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: viewFormatCount( viewFormatCount_ ), pViewFormats( pViewFormats_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageFormatListCreateInfo( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageFormatListCreateInfo( *reinterpret_cast<ImageFormatListCreateInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ImageFormatListCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ )
|
|
: viewFormatCount( static_cast<uint32_t>( viewFormats_.size() ) ), pViewFormats( viewFormats_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageFormatListCreateInfo & operator=( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageFormatListCreateInfo & operator=( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewFormatCount = viewFormatCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pViewFormats = pViewFormats_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ImageFormatListCreateInfo & setViewFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewFormatCount = static_cast<uint32_t>( viewFormats_.size() );
|
|
pViewFormats = viewFormats_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImageFormatListCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageFormatListCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkImageFormatListCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageFormatListCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Format * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, viewFormatCount, pViewFormats );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageFormatListCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( viewFormatCount == rhs.viewFormatCount )
|
|
&& ( pViewFormats == rhs.pViewFormats );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatListCreateInfo;
|
|
const void * pNext = {};
|
|
uint32_t viewFormatCount = {};
|
|
const VULKAN_HPP_NAMESPACE::Format * pViewFormats = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo ) == sizeof( VkImageFormatListCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo>::value, "ImageFormatListCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageFormatListCreateInfo>
|
|
{
|
|
using Type = ImageFormatListCreateInfo;
|
|
};
|
|
using ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo;
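  // Illustrative usage sketch (not generated code): advertising the view formats of a
  // mutable-format image via the ArrayProxyNoTemporaries constructor above. `device` is an assumed
  // vk::Device handle, and the default, exception-enabled configuration is assumed for createImage.
  //
  //   std::array<vk::Format, 2> viewFormats = { vk::Format::eR8G8B8A8Unorm, vk::Format::eR8G8B8A8Srgb };
  //   vk::ImageFormatListCreateInfo formatList( viewFormats );
  //
  //   vk::ImageCreateInfo imageInfo{};
  //   imageInfo.setFlags( vk::ImageCreateFlagBits::eMutableFormat )
  //            .setFormat( vk::Format::eR8G8B8A8Unorm )
  //            .setPNext( &formatList );
  //   vk::Image image = device.createImage( imageInfo );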
|
|
|
|
struct ImageFormatProperties2
|
|
{
|
|
using NativeType = VkImageFormatProperties2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatProperties2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageFormatProperties2(VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: imageFormatProperties( imageFormatProperties_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageFormatProperties2( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageFormatProperties2( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageFormatProperties2( *reinterpret_cast<ImageFormatProperties2 const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageFormatProperties2 & operator=( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageFormatProperties2 & operator=( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatProperties2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageFormatProperties2*>( this );
|
|
}
|
|
|
|
explicit operator VkImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageFormatProperties2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageFormatProperties const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, imageFormatProperties );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageFormatProperties2 const & ) const = default;
|
|
#else
|
|
bool operator==( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( imageFormatProperties == rhs.imageFormatProperties );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatProperties2;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::value, "ImageFormatProperties2 is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageFormatProperties2>
|
|
{
|
|
using Type = ImageFormatProperties2;
|
|
};
|
|
using ImageFormatProperties2KHR = ImageFormatProperties2;
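  // Illustrative usage sketch (not generated code): ImageFormatProperties2 is an output struct,
  // filled in by vk::PhysicalDevice::getImageFormatProperties2. `physicalDevice` is an assumed
  // handle and the default, exception-enabled configuration is assumed for the return type.
  //
  //   vk::PhysicalDeviceImageFormatInfo2 formatInfo( vk::Format::eR8G8B8A8Unorm,
  //                                                  vk::ImageType::e2D,
  //                                                  vk::ImageTiling::eOptimal,
  //                                                  vk::ImageUsageFlagBits::eSampled );
  //   vk::ImageFormatProperties2 props     = physicalDevice.getImageFormatProperties2( formatInfo );
  //   vk::Extent3D               maxExtent = props.imageFormatProperties.maxExtent;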
|
|
|
|
struct ImageMemoryBarrier
|
|
{
|
|
using NativeType = VkImageMemoryBarrier;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageMemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), oldLayout( oldLayout_ ), newLayout( newLayout_ ), srcQueueFamilyIndex( srcQueueFamilyIndex_ ), dstQueueFamilyIndex( dstQueueFamilyIndex_ ), image( image_ ), subresourceRange( subresourceRange_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageMemoryBarrier( *reinterpret_cast<ImageMemoryBarrier const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageMemoryBarrier & operator=( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageMemoryBarrier & operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcAccessMask = srcAccessMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstAccessMask = dstAccessMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
oldLayout = oldLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
newLayout = newLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcQueueFamilyIndex = srcQueueFamilyIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstQueueFamilyIndex = dstQueueFamilyIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
image = image_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subresourceRange = subresourceRange_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImageMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageMemoryBarrier*>( this );
|
|
}
|
|
|
|
explicit operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageMemoryBarrier*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageSubresourceRange const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcAccessMask, dstAccessMask, oldLayout, newLayout, srcQueueFamilyIndex, dstQueueFamilyIndex, image, subresourceRange );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageMemoryBarrier const & ) const = default;
|
|
#else
|
|
bool operator==( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( srcAccessMask == rhs.srcAccessMask )
|
|
&& ( dstAccessMask == rhs.dstAccessMask )
|
|
&& ( oldLayout == rhs.oldLayout )
|
|
&& ( newLayout == rhs.newLayout )
|
|
&& ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
|
|
&& ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
|
|
&& ( image == rhs.image )
|
|
&& ( subresourceRange == rhs.subresourceRange );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
|
|
VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
uint32_t srcQueueFamilyIndex = {};
|
|
uint32_t dstQueueFamilyIndex = {};
|
|
VULKAN_HPP_NAMESPACE::Image image = {};
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier>::value, "ImageMemoryBarrier is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageMemoryBarrier>
|
|
{
|
|
using Type = ImageMemoryBarrier;
|
|
};
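  // Illustrative usage sketch (not generated code): recording an image layout transition with the
  // setter chain generated above. `cmd` and `image` are assumed, valid vk::CommandBuffer /
  // vk::Image handles; the enhanced-mode pipelineBarrier overload taking ArrayProxy arguments is
  // assumed to be available.
  //
  //   vk::ImageMemoryBarrier barrier{};
  //   barrier.setSrcAccessMask( {} )
  //          .setDstAccessMask( vk::AccessFlagBits::eTransferWrite )
  //          .setOldLayout( vk::ImageLayout::eUndefined )
  //          .setNewLayout( vk::ImageLayout::eTransferDstOptimal )
  //          .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //          .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //          .setImage( image )
  //          .setSubresourceRange( { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );
  //
  //   cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe, vk::PipelineStageFlagBits::eTransfer,
  //                        {}, nullptr, nullptr, barrier );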
|
|
|
|
struct ImageMemoryRequirementsInfo2
|
|
{
|
|
using NativeType = VkImageMemoryRequirementsInfo2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryRequirementsInfo2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2(VULKAN_HPP_NAMESPACE::Image image_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: image( image_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageMemoryRequirementsInfo2( *reinterpret_cast<ImageMemoryRequirementsInfo2 const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageMemoryRequirementsInfo2 & operator=( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageMemoryRequirementsInfo2 & operator=( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
image = image_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImageMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( this );
|
|
}
|
|
|
|
explicit operator VkImageMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageMemoryRequirementsInfo2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, image );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageMemoryRequirementsInfo2 const & ) const = default;
|
|
#else
|
|
bool operator==( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( image == rhs.image );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryRequirementsInfo2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Image image = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2>::value, "ImageMemoryRequirementsInfo2 is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageMemoryRequirementsInfo2>
|
|
{
|
|
using Type = ImageMemoryRequirementsInfo2;
|
|
};
|
|
using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2;
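  // Illustrative usage sketch (not generated code): querying memory requirements through the
  // ...2 entry point. `device` and `image` are assumed, valid handles.
  //
  //   vk::ImageMemoryRequirementsInfo2 info( image );
  //   vk::MemoryRequirements2 requirements = device.getImageMemoryRequirements2( info );
  //   vk::DeviceSize          size         = requirements.memoryRequirements.size;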
|
|
|
|
#if defined( VK_USE_PLATFORM_FUCHSIA )
|
|
struct ImagePipeSurfaceCreateInfoFUCHSIA
|
|
{
|
|
using NativeType = VkImagePipeSurfaceCreateInfoFUCHSIA;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA(VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = {}, zx_handle_t imagePipeHandle_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), imagePipeHandle( imagePipeHandle_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImagePipeSurfaceCreateInfoFUCHSIA( *reinterpret_cast<ImagePipeSurfaceCreateInfoFUCHSIA const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImagePipeSurfaceCreateInfoFUCHSIA & operator=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImagePipeSurfaceCreateInfoFUCHSIA & operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setImagePipeHandle( zx_handle_t imagePipeHandle_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imagePipeHandle = imagePipeHandle_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImagePipeSurfaceCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA*>( this );
|
|
}
|
|
|
|
explicit operator VkImagePipeSurfaceCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImagePipeSurfaceCreateInfoFUCHSIA*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA const &, zx_handle_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, imagePipeHandle );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
std::strong_ordering operator<=>( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
|
|
if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
|
|
if ( auto cmp = memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ) == 0 );
|
|
}
|
|
|
|
bool operator!=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags = {};
|
|
zx_handle_t imagePipeHandle = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA ) == sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA>::value, "ImagePipeSurfaceCreateInfoFUCHSIA is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImagepipeSurfaceCreateInfoFUCHSIA>
|
|
{
|
|
using Type = ImagePipeSurfaceCreateInfoFUCHSIA;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_FUCHSIA*/
|
|
|
|
struct ImagePlaneMemoryRequirementsInfo
|
|
{
|
|
using NativeType = VkImagePlaneMemoryRequirementsInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagePlaneMemoryRequirementsInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo(VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor) VULKAN_HPP_NOEXCEPT
|
|
: planeAspect( planeAspect_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImagePlaneMemoryRequirementsInfo( *reinterpret_cast<ImagePlaneMemoryRequirementsInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImagePlaneMemoryRequirementsInfo & operator=( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImagePlaneMemoryRequirementsInfo & operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
planeAspect = planeAspect_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImagePlaneMemoryRequirementsInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImagePlaneMemoryRequirementsInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkImagePlaneMemoryRequirementsInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImagePlaneMemoryRequirementsInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageAspectFlagBits const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, planeAspect );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImagePlaneMemoryRequirementsInfo const & ) const = default;
|
|
#else
|
|
bool operator==( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( planeAspect == rhs.planeAspect );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo>::value, "ImagePlaneMemoryRequirementsInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImagePlaneMemoryRequirementsInfo>
|
|
{
|
|
using Type = ImagePlaneMemoryRequirementsInfo;
|
|
};
|
|
using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo;
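  // Illustrative usage sketch (not generated code): for a disjoint multi-planar image, the plane is
  // selected by chaining this struct into vk::ImageMemoryRequirementsInfo2 (see above). `device`
  // and `image` are assumed handles.
  //
  //   vk::ImagePlaneMemoryRequirementsInfo planeInfo( vk::ImageAspectFlagBits::ePlane0 );
  //   vk::ImageMemoryRequirementsInfo2 info( image );
  //   info.setPNext( &planeInfo );
  //   vk::MemoryRequirements2 planeRequirements = device.getImageMemoryRequirements2( info );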
|
|
|
|
struct ImageResolve
|
|
{
|
|
using NativeType = VkImageResolve;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageResolve(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageResolve( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageResolve( *reinterpret_cast<ImageResolve const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageResolve & operator=( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageResolve & operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageResolve & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcSubresource = srcSubresource_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageResolve & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcOffset = srcOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageResolve & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstSubresource = dstSubresource_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageResolve & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstOffset = dstOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageResolve & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
extent = extent_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImageResolve const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageResolve*>( this );
|
|
}
|
|
|
|
explicit operator VkImageResolve &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageResolve*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( srcSubresource, srcOffset, dstSubresource, dstOffset, extent );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageResolve const & ) const = default;
|
|
#else
|
|
bool operator==( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( srcSubresource == rhs.srcSubresource )
|
|
&& ( srcOffset == rhs.srcOffset )
|
|
&& ( dstSubresource == rhs.dstSubresource )
|
|
&& ( dstOffset == rhs.dstOffset )
|
|
&& ( extent == rhs.extent );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
|
|
VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
|
|
VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
|
|
VULKAN_HPP_NAMESPACE::Extent3D extent = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageResolve>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageResolve>::value, "ImageResolve is not nothrow_move_constructible!" );
|
|
|
|
struct ImageResolve2
|
|
{
|
|
using NativeType = VkImageResolve2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageResolve2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageResolve2(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageResolve2( ImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageResolve2( VkImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageResolve2( *reinterpret_cast<ImageResolve2 const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageResolve2 & operator=( ImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageResolve2 & operator=( VkImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcSubresource = srcSubresource_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcOffset = srcOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstSubresource = dstSubresource_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstOffset = dstOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
extent = extent_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImageResolve2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageResolve2*>( this );
|
|
}
|
|
|
|
explicit operator VkImageResolve2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageResolve2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::Offset3D const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcSubresource, srcOffset, dstSubresource, dstOffset, extent );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageResolve2 const & ) const = default;
|
|
#else
|
|
bool operator==( ImageResolve2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( srcSubresource == rhs.srcSubresource )
|
|
&& ( srcOffset == rhs.srcOffset )
|
|
&& ( dstSubresource == rhs.dstSubresource )
|
|
&& ( dstOffset == rhs.dstOffset )
|
|
&& ( extent == rhs.extent );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageResolve2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageResolve2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
|
|
VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
|
|
VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
|
|
VULKAN_HPP_NAMESPACE::Extent3D extent = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageResolve2 ) == sizeof( VkImageResolve2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageResolve2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageResolve2>::value, "ImageResolve2 is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageResolve2>
|
|
{
|
|
using Type = ImageResolve2;
|
|
};
|
|
using ImageResolve2KHR = ImageResolve2;
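  // Illustrative usage sketch (not generated code): describing a multisample resolve region for
  // vk::CommandBuffer::resolveImage2 (Vulkan 1.3 / VK_KHR_copy_commands2). `cmd`, `srcImage`,
  // `dstImage`, `width` and `height` are assumed, and vk::ResolveImageInfo2's ArrayProxy
  // constructor is assumed to follow the usual pattern in this header.
  //
  //   vk::ImageResolve2 region{};
  //   region.setSrcSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //         .setDstSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //         .setExtent( { width, height, 1 } );
  //
  //   vk::ResolveImageInfo2 resolveInfo( srcImage, vk::ImageLayout::eTransferSrcOptimal,
  //                                      dstImage, vk::ImageLayout::eTransferDstOptimal, region );
  //   cmd.resolveImage2( resolveInfo );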
|
|
|
|
struct ImageSparseMemoryRequirementsInfo2
|
|
{
|
|
using NativeType = VkImageSparseMemoryRequirementsInfo2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSparseMemoryRequirementsInfo2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2(VULKAN_HPP_NAMESPACE::Image image_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: image( image_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageSparseMemoryRequirementsInfo2( *reinterpret_cast<ImageSparseMemoryRequirementsInfo2 const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageSparseMemoryRequirementsInfo2 & operator=( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageSparseMemoryRequirementsInfo2 & operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
image = image_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImageSparseMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( this );
|
|
}
|
|
|
|
explicit operator VkImageSparseMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageSparseMemoryRequirementsInfo2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, image );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageSparseMemoryRequirementsInfo2 const & ) const = default;
|
|
#else
|
|
bool operator==( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( image == rhs.image );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Image image = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2>::value, "ImageSparseMemoryRequirementsInfo2 is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageSparseMemoryRequirementsInfo2>
|
|
{
|
|
using Type = ImageSparseMemoryRequirementsInfo2;
|
|
};
|
|
using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2;
|
|
|
|
struct ImageStencilUsageCreateInfo
|
|
{
|
|
using NativeType = VkImageStencilUsageCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageStencilUsageCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo(VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: stencilUsage( stencilUsage_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageStencilUsageCreateInfo( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageStencilUsageCreateInfo( *reinterpret_cast<ImageStencilUsageCreateInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageStencilUsageCreateInfo & operator=( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageStencilUsageCreateInfo & operator=( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo & setStencilUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stencilUsage = stencilUsage_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImageStencilUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageStencilUsageCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkImageStencilUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageStencilUsageCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, stencilUsage );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageStencilUsageCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( stencilUsage == rhs.stencilUsage );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageStencilUsageCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo ) == sizeof( VkImageStencilUsageCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo>::value, "ImageStencilUsageCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageStencilUsageCreateInfo>
|
|
{
|
|
using Type = ImageStencilUsageCreateInfo;
|
|
};
|
|
using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo;
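  // Illustrative usage sketch (not generated code): giving the stencil aspect of a depth/stencil
  // image a usage that differs from the depth aspect. The vk::ImageCreateInfo setters are assumed
  // to match the rest of this header.
  //
  //   vk::ImageStencilUsageCreateInfo stencilUsage( vk::ImageUsageFlagBits::eDepthStencilAttachment );
  //
  //   vk::ImageCreateInfo imageInfo{};
  //   imageInfo.setFormat( vk::Format::eD32SfloatS8Uint )
  //            .setUsage( vk::ImageUsageFlagBits::eDepthStencilAttachment | vk::ImageUsageFlagBits::eSampled )
  //            .setPNext( &stencilUsage );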
|
|
|
|
struct ImageSwapchainCreateInfoKHR
|
|
{
|
|
using NativeType = VkImageSwapchainCreateInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSwapchainCreateInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: swapchain( swapchain_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageSwapchainCreateInfoKHR( *reinterpret_cast<ImageSwapchainCreateInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageSwapchainCreateInfoKHR & operator=( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageSwapchainCreateInfoKHR & operator=( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
swapchain = swapchain_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImageSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageSwapchainCreateInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkImageSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageSwapchainCreateInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SwapchainKHR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, swapchain );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageSwapchainCreateInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( swapchain == rhs.swapchain );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSwapchainCreateInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR>::value, "ImageSwapchainCreateInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageSwapchainCreateInfoKHR>
|
|
{
|
|
using Type = ImageSwapchainCreateInfoKHR;
|
|
};
|
|
|
|
struct ImageViewASTCDecodeModeEXT
|
|
{
|
|
using NativeType = VkImageViewASTCDecodeModeEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAstcDecodeModeEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT(VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined) VULKAN_HPP_NOEXCEPT
|
|
: decodeMode( decodeMode_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageViewASTCDecodeModeEXT( *reinterpret_cast<ImageViewASTCDecodeModeEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageViewASTCDecodeModeEXT & operator=( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageViewASTCDecodeModeEXT & operator=( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT & setDecodeMode( VULKAN_HPP_NAMESPACE::Format decodeMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
decodeMode = decodeMode_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImageViewASTCDecodeModeEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageViewASTCDecodeModeEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkImageViewASTCDecodeModeEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageViewASTCDecodeModeEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Format const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, decodeMode );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageViewASTCDecodeModeEXT const & ) const = default;
|
|
#else
|
|
bool operator==( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( decodeMode == rhs.decodeMode );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAstcDecodeModeEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Format decodeMode = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT ) == sizeof( VkImageViewASTCDecodeModeEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT>::value, "ImageViewASTCDecodeModeEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageViewAstcDecodeModeEXT>
|
|
{
|
|
using Type = ImageViewASTCDecodeModeEXT;
|
|
};
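  // Illustrative usage sketch (not generated code): requesting an ASTC decode mode on an image view
  // (VK_EXT_astc_decode_mode). `image` is an assumed handle and the vk::ImageViewCreateInfo setters
  // are assumed to follow the generated pattern below.
  //
  //   vk::ImageViewASTCDecodeModeEXT decodeMode( vk::Format::eR8G8B8A8Unorm );
  //
  //   vk::ImageViewCreateInfo viewInfo{};
  //   viewInfo.setImage( image )
  //           .setViewType( vk::ImageViewType::e2D )
  //           .setFormat( vk::Format::eAstc8x8UnormBlock )
  //           .setSubresourceRange( { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } )
  //           .setPNext( &decodeMode );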
|
|
|
|
struct ImageViewAddressPropertiesNVX
|
|
{
|
|
using NativeType = VkImageViewAddressPropertiesNVX;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAddressPropertiesNVX;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: deviceAddress( deviceAddress_ ), size( size_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageViewAddressPropertiesNVX( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageViewAddressPropertiesNVX( *reinterpret_cast<ImageViewAddressPropertiesNVX const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageViewAddressPropertiesNVX & operator=( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageViewAddressPropertiesNVX & operator=( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkImageViewAddressPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageViewAddressPropertiesNVX*>( this );
|
|
}
|
|
|
|
explicit operator VkImageViewAddressPropertiesNVX &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageViewAddressPropertiesNVX*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, deviceAddress, size );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageViewAddressPropertiesNVX const & ) const = default;
|
|
#else
|
|
bool operator==( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( deviceAddress == rhs.deviceAddress )
|
|
&& ( size == rhs.size );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAddressPropertiesNVX;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX ) == sizeof( VkImageViewAddressPropertiesNVX ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::value, "ImageViewAddressPropertiesNVX is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageViewAddressPropertiesNVX>
|
|
{
|
|
using Type = ImageViewAddressPropertiesNVX;
|
|
};
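
  // Note (illustrative only, not produced by the registry generator): ImageViewAddressPropertiesNVX
  // is an output structure; the VK_NVX_image_view_handle query writes deviceAddress and size, the
  // application merely provides correctly typed storage. A minimal sketch with a hypothetical name:
  inline VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX exampleBlankImageViewAddressProperties() VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties;   // sType preset, deviceAddress and size left zero
    return properties;                                                // the device query overwrites both values
  }
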
struct ImageViewCreateInfo
|
|
{
|
|
using NativeType = VkImageViewCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageViewCreateInfo(VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageViewType viewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), image( image_ ), viewType( viewType_ ), format( format_ ), components( components_ ), subresourceRange( subresourceRange_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageViewCreateInfo( *reinterpret_cast<ImageViewCreateInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageViewCreateInfo & operator=( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageViewCreateInfo & operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
image = image_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setViewType( VULKAN_HPP_NAMESPACE::ImageViewType viewType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewType = viewType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
format = format_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
components = components_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subresourceRange = subresourceRange_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImageViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageViewCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkImageViewCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageViewCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageViewCreateFlags const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageViewType const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::ComponentMapping const &, VULKAN_HPP_NAMESPACE::ImageSubresourceRange const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, image, viewType, format, components, subresourceRange );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageViewCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( image == rhs.image )
|
|
&& ( viewType == rhs.viewType )
|
|
&& ( format == rhs.format )
|
|
&& ( components == rhs.components )
|
|
&& ( subresourceRange == rhs.subresourceRange );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::Image image = {};
|
|
VULKAN_HPP_NAMESPACE::ImageViewType viewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D;
|
|
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
|
|
VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo>::value, "ImageViewCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageViewCreateInfo>
|
|
{
|
|
using Type = ImageViewCreateInfo;
|
|
};
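
  // Usage sketch (illustrative only, not produced by the registry generator): an
  // ImageViewCreateInfo for a single-mip, single-layer 2D color view. The helper name is
  // hypothetical; the image and format are placeholders supplied by the caller, and components is
  // left at its identity default.
  inline VULKAN_HPP_NAMESPACE::ImageViewCreateInfo exampleColor2DViewCreateInfo( VULKAN_HPP_NAMESPACE::Image image_, VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::ImageViewCreateInfo createInfo;
    createInfo.image                           = image_;
    createInfo.viewType                        = VULKAN_HPP_NAMESPACE::ImageViewType::e2D;
    createInfo.format                          = format_;
    createInfo.subresourceRange.aspectMask     = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
    createInfo.subresourceRange.baseMipLevel   = 0;
    createInfo.subresourceRange.levelCount     = 1;
    createInfo.subresourceRange.baseArrayLayer = 0;
    createInfo.subresourceRange.layerCount     = 1;
    return createInfo;
  }
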
struct ImageViewHandleInfoNVX
|
|
{
|
|
using NativeType = VkImageViewHandleInfoNVX;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewHandleInfoNVX;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX(VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: imageView( imageView_ ), descriptorType( descriptorType_ ), sampler( sampler_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageViewHandleInfoNVX( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageViewHandleInfoNVX( *reinterpret_cast<ImageViewHandleInfoNVX const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageViewHandleInfoNVX & operator=( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageViewHandleInfoNVX & operator=( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageView = imageView_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorType = descriptorType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sampler = sampler_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImageViewHandleInfoNVX const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageViewHandleInfoNVX*>( this );
|
|
}
|
|
|
|
explicit operator VkImageViewHandleInfoNVX &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageViewHandleInfoNVX*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageView const &, VULKAN_HPP_NAMESPACE::DescriptorType const &, VULKAN_HPP_NAMESPACE::Sampler const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, imageView, descriptorType, sampler );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageViewHandleInfoNVX const & ) const = default;
|
|
#else
|
|
bool operator==( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( imageView == rhs.imageView )
|
|
&& ( descriptorType == rhs.descriptorType )
|
|
&& ( sampler == rhs.sampler );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewHandleInfoNVX;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageView imageView = {};
|
|
VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
|
|
VULKAN_HPP_NAMESPACE::Sampler sampler = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX ) == sizeof( VkImageViewHandleInfoNVX ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX>::value, "ImageViewHandleInfoNVX is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageViewHandleInfoNVX>
|
|
{
|
|
using Type = ImageViewHandleInfoNVX;
|
|
};
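
  // Usage sketch (illustrative only, not produced by the registry generator): describing a
  // combined image/sampler for the VK_NVX_image_view_handle query. The helper name is
  // hypothetical; the image view and sampler are placeholders supplied by the caller.
  inline VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX exampleCombinedImageSamplerHandleInfo( VULKAN_HPP_NAMESPACE::ImageView imageView_, VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX handleInfo;
    handleInfo.imageView      = imageView_;
    handleInfo.descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eCombinedImageSampler;
    handleInfo.sampler        = sampler_;
    return handleInfo;
  }
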
struct ImageViewMinLodCreateInfoEXT
|
|
{
|
|
using NativeType = VkImageViewMinLodCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewMinLodCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageViewMinLodCreateInfoEXT(float minLod_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: minLod( minLod_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageViewMinLodCreateInfoEXT( ImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageViewMinLodCreateInfoEXT( VkImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageViewMinLodCreateInfoEXT( *reinterpret_cast<ImageViewMinLodCreateInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageViewMinLodCreateInfoEXT & operator=( ImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageViewMinLodCreateInfoEXT & operator=( VkImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewMinLodCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewMinLodCreateInfoEXT & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
minLod = minLod_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImageViewMinLodCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageViewMinLodCreateInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkImageViewMinLodCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageViewMinLodCreateInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, float const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, minLod );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageViewMinLodCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( ImageViewMinLodCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( minLod == rhs.minLod );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageViewMinLodCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewMinLodCreateInfoEXT;
|
|
const void * pNext = {};
|
|
float minLod = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT ) == sizeof( VkImageViewMinLodCreateInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT>::value, "ImageViewMinLodCreateInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageViewMinLodCreateInfoEXT>
|
|
{
|
|
using Type = ImageViewMinLodCreateInfoEXT;
|
|
};
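
  // Usage sketch (illustrative only, not produced by the registry generator): clamping an image
  // view to a minimum LOD by chaining an ImageViewMinLodCreateInfoEXT into the pNext of an
  // ImageViewCreateInfo. The helper name is hypothetical; the LOD value comes from the caller.
  inline VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT exampleMinLodInfo( float minLod_ ) VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT minLodInfo;
    minLodInfo.minLod = minLod_;   // e.g. 2.0f to skip the two most detailed mip levels
    return minLodInfo;
  }
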
struct ImageViewUsageCreateInfo
|
|
{
|
|
using NativeType = VkImageViewUsageCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewUsageCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo(VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: usage( usage_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImageViewUsageCreateInfo( *reinterpret_cast<ImageViewUsageCreateInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImageViewUsageCreateInfo & operator=( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImageViewUsageCreateInfo & operator=( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
usage = usage_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImageViewUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImageViewUsageCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkImageViewUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImageViewUsageCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, usage );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImageViewUsageCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( usage == rhs.usage );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewUsageCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo>::value, "ImageViewUsageCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImageViewUsageCreateInfo>
|
|
{
|
|
using Type = ImageViewUsageCreateInfo;
|
|
};
|
|
using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo;
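
  // Usage sketch (illustrative only, not produced by the registry generator): restricting a view
  // to sampled use by chaining an ImageViewUsageCreateInfo into ImageViewCreateInfo::pNext; the
  // requested usage must be a subset of the image's own usage flags. The helper name is hypothetical.
  inline VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo exampleSampledOnlyViewUsage() VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo usageInfo;
    usageInfo.usage = VULKAN_HPP_NAMESPACE::ImageUsageFlagBits::eSampled;
    return usageInfo;
  }
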
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
|
|
struct ImportAndroidHardwareBufferInfoANDROID
|
|
{
|
|
using NativeType = VkImportAndroidHardwareBufferInfoANDROID;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportAndroidHardwareBufferInfoANDROID;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID(struct AHardwareBuffer * buffer_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: buffer( buffer_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImportAndroidHardwareBufferInfoANDROID( *reinterpret_cast<ImportAndroidHardwareBufferInfoANDROID const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImportAndroidHardwareBufferInfoANDROID & operator=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImportAndroidHardwareBufferInfoANDROID & operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID & setBuffer( struct AHardwareBuffer * buffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
buffer = buffer_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImportAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImportAndroidHardwareBufferInfoANDROID*>( this );
|
|
}
|
|
|
|
explicit operator VkImportAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImportAndroidHardwareBufferInfoANDROID*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, struct AHardwareBuffer * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, buffer );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImportAndroidHardwareBufferInfoANDROID const & ) const = default;
|
|
#else
|
|
bool operator==( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( buffer == rhs.buffer );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID;
|
|
const void * pNext = {};
|
|
struct AHardwareBuffer * buffer = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID ) == sizeof( VkImportAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID>::value, "ImportAndroidHardwareBufferInfoANDROID is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImportAndroidHardwareBufferInfoANDROID>
|
|
{
|
|
using Type = ImportAndroidHardwareBufferInfoANDROID;
|
|
};
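
  // Usage sketch (illustrative only, not produced by the registry generator): wrapping an
  // externally owned AHardwareBuffer so it can be chained into a memory allocation structure via
  // pNext. The helper name is hypothetical; the buffer pointer is supplied by the caller and
  // ownership stays on the Android side.
  inline VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID exampleImportAHardwareBuffer( struct AHardwareBuffer * buffer_ ) VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID importInfo;
    importInfo.buffer = buffer_;
    return importInfo;
  }
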
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
|
|
|
|
struct ImportFenceFdInfoKHR
|
|
{
|
|
using NativeType = VkImportFenceFdInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceFdInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, int fd_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: fence( fence_ ), flags( flags_ ), handleType( handleType_ ), fd( fd_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImportFenceFdInfoKHR( *reinterpret_cast<ImportFenceFdInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImportFenceFdInfoKHR & operator=( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImportFenceFdInfoKHR & operator=( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fence = fence_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fd = fd_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImportFenceFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImportFenceFdInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkImportFenceFdInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImportFenceFdInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Fence const &, VULKAN_HPP_NAMESPACE::FenceImportFlags const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits const &, int const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, fence, flags, handleType, fd );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImportFenceFdInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( fence == rhs.fence )
|
|
&& ( flags == rhs.flags )
|
|
&& ( handleType == rhs.handleType )
|
|
&& ( fd == rhs.fd );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceFdInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Fence fence = {};
|
|
VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
|
|
int fd = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR>::value, "ImportFenceFdInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImportFenceFdInfoKHR>
|
|
{
|
|
using Type = ImportFenceFdInfoKHR;
|
|
};
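
  // Usage sketch (illustrative only, not produced by the registry generator): importing a sync
  // file descriptor into an existing fence as a temporary payload. The helper name is
  // hypothetical; the fence and file descriptor are placeholders supplied by the caller.
  inline VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR exampleImportSyncFd( VULKAN_HPP_NAMESPACE::Fence fence_, int fd_ ) VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR importInfo;
    importInfo.fence      = fence_;
    importInfo.flags      = VULKAN_HPP_NAMESPACE::FenceImportFlagBits::eTemporary;
    importInfo.handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eSyncFd;
    importInfo.fd         = fd_;
    return importInfo;
  }
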
#if defined( VK_USE_PLATFORM_WIN32_KHR )
|
|
struct ImportFenceWin32HandleInfoKHR
|
|
{
|
|
using NativeType = VkImportFenceWin32HandleInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceWin32HandleInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: fence( fence_ ), flags( flags_ ), handleType( handleType_ ), handle( handle_ ), name( name_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImportFenceWin32HandleInfoKHR( *reinterpret_cast<ImportFenceWin32HandleInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImportFenceWin32HandleInfoKHR & operator=( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImportFenceWin32HandleInfoKHR & operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fence = fence_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handle = handle_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
name = name_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkImportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImportFenceWin32HandleInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Fence const &, VULKAN_HPP_NAMESPACE::FenceImportFlags const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits const &, HANDLE const &, LPCWSTR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, fence, flags, handleType, handle, name );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImportFenceWin32HandleInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( fence == rhs.fence )
|
|
&& ( flags == rhs.flags )
|
|
&& ( handleType == rhs.handleType )
|
|
&& ( handle == rhs.handle )
|
|
&& ( name == rhs.name );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Fence fence = {};
|
|
VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
|
|
HANDLE handle = {};
|
|
LPCWSTR name = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR>::value, "ImportFenceWin32HandleInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImportFenceWin32HandleInfoKHR>
|
|
{
|
|
using Type = ImportFenceWin32HandleInfoKHR;
|
|
};
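
  // Usage sketch (illustrative only, not produced by the registry generator): importing an NT
  // handle into an existing fence. The helper name is hypothetical; the fence and handle are
  // placeholders supplied by the caller, and name stays null when importing by handle.
  inline VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR exampleImportFenceNtHandle( VULKAN_HPP_NAMESPACE::Fence fence_, HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR importInfo;
    importInfo.fence      = fence_;
    importInfo.handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueWin32;
    importInfo.handle     = handle_;
    return importInfo;
  }
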
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
|
|
|
|
#if defined( VK_USE_PLATFORM_FUCHSIA )
|
|
struct ImportMemoryBufferCollectionFUCHSIA
|
|
{
|
|
using NativeType = VkImportMemoryBufferCollectionFUCHSIA;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryBufferCollectionFUCHSIA;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImportMemoryBufferCollectionFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, uint32_t index_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: collection( collection_ ), index( index_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImportMemoryBufferCollectionFUCHSIA( ImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImportMemoryBufferCollectionFUCHSIA( VkImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImportMemoryBufferCollectionFUCHSIA( *reinterpret_cast<ImportMemoryBufferCollectionFUCHSIA const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImportMemoryBufferCollectionFUCHSIA & operator=( ImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImportMemoryBufferCollectionFUCHSIA & operator=( VkImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
collection = collection_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
index = index_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImportMemoryBufferCollectionFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImportMemoryBufferCollectionFUCHSIA*>( this );
|
|
}
|
|
|
|
explicit operator VkImportMemoryBufferCollectionFUCHSIA &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImportMemoryBufferCollectionFUCHSIA*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, collection, index );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImportMemoryBufferCollectionFUCHSIA const & ) const = default;
|
|
#else
|
|
bool operator==( ImportMemoryBufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( collection == rhs.collection )
|
|
&& ( index == rhs.index );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImportMemoryBufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryBufferCollectionFUCHSIA;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {};
|
|
uint32_t index = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA ) == sizeof( VkImportMemoryBufferCollectionFUCHSIA ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA>::value, "ImportMemoryBufferCollectionFUCHSIA is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImportMemoryBufferCollectionFUCHSIA>
|
|
{
|
|
using Type = ImportMemoryBufferCollectionFUCHSIA;
|
|
};
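
  // Usage sketch (illustrative only, not produced by the registry generator): importing one
  // buffer of a Fuchsia buffer collection by chaining this structure into a memory allocation
  // structure via pNext. The helper name is hypothetical; collection and index come from the caller.
  inline VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA exampleImportCollectionBuffer( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_, uint32_t index_ ) VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA importInfo;
    importInfo.collection = collection_;
    importInfo.index      = index_;
    return importInfo;
  }
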
#endif /*VK_USE_PLATFORM_FUCHSIA*/
|
|
|
|
struct ImportMemoryFdInfoKHR
|
|
{
|
|
using NativeType = VkImportMemoryFdInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryFdInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, int fd_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: handleType( handleType_ ), fd( fd_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImportMemoryFdInfoKHR( *reinterpret_cast<ImportMemoryFdInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImportMemoryFdInfoKHR & operator=( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImportMemoryFdInfoKHR & operator=( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fd = fd_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImportMemoryFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImportMemoryFdInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkImportMemoryFdInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImportMemoryFdInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &, int const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, handleType, fd );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImportMemoryFdInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( handleType == rhs.handleType )
|
|
&& ( fd == rhs.fd );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryFdInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
|
|
int fd = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR>::value, "ImportMemoryFdInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImportMemoryFdInfoKHR>
|
|
{
|
|
using Type = ImportMemoryFdInfoKHR;
|
|
};
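
  // Usage sketch (illustrative only, not produced by the registry generator): importing an opaque
  // POSIX file descriptor by chaining this structure into a memory allocation structure via
  // pNext. The helper name is hypothetical; the file descriptor is supplied by the caller and its
  // ownership passes to the implementation on a successful import.
  inline VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR exampleImportOpaqueFd( int fd_ ) VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR importInfo;
    importInfo.handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
    importInfo.fd         = fd_;
    return importInfo;
  }
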
struct ImportMemoryHostPointerInfoEXT
|
|
{
|
|
using NativeType = VkImportMemoryHostPointerInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryHostPointerInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, void * pHostPointer_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: handleType( handleType_ ), pHostPointer( pHostPointer_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImportMemoryHostPointerInfoEXT( *reinterpret_cast<ImportMemoryHostPointerInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImportMemoryHostPointerInfoEXT & operator=( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImportMemoryHostPointerInfoEXT & operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & setPHostPointer( void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pHostPointer = pHostPointer_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImportMemoryHostPointerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImportMemoryHostPointerInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkImportMemoryHostPointerInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImportMemoryHostPointerInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &, void * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, handleType, pHostPointer );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImportMemoryHostPointerInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( handleType == rhs.handleType )
|
|
&& ( pHostPointer == rhs.pHostPointer );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
|
|
void * pHostPointer = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT>::value, "ImportMemoryHostPointerInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImportMemoryHostPointerInfoEXT>
|
|
{
|
|
using Type = ImportMemoryHostPointerInfoEXT;
|
|
};
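
  // Usage sketch (illustrative only, not produced by the registry generator): importing host
  // memory allocated by the application. The helper name is hypothetical; the pointer is supplied
  // by the caller and must satisfy the alignment reported for external host memory.
  inline VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT exampleImportHostAllocation( void * hostPointer_ ) VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT importInfo;
    importInfo.handleType   = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT;
    importInfo.pHostPointer = hostPointer_;
    return importInfo;
  }
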
#if defined( VK_USE_PLATFORM_WIN32_KHR )
|
|
struct ImportMemoryWin32HandleInfoKHR
|
|
{
|
|
using NativeType = VkImportMemoryWin32HandleInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: handleType( handleType_ ), handle( handle_ ), name( name_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImportMemoryWin32HandleInfoKHR( *reinterpret_cast<ImportMemoryWin32HandleInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImportMemoryWin32HandleInfoKHR & operator=( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImportMemoryWin32HandleInfoKHR & operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handle = handle_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
name = name_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImportMemoryWin32HandleInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkImportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImportMemoryWin32HandleInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &, HANDLE const &, LPCWSTR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, handleType, handle, name );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImportMemoryWin32HandleInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( handleType == rhs.handleType )
|
|
&& ( handle == rhs.handle )
|
|
&& ( name == rhs.name );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
|
|
HANDLE handle = {};
|
|
LPCWSTR name = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR>::value, "ImportMemoryWin32HandleInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImportMemoryWin32HandleInfoKHR>
|
|
{
|
|
using Type = ImportMemoryWin32HandleInfoKHR;
|
|
};
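
  // Usage sketch (illustrative only, not produced by the registry generator): importing an NT
  // handle that refers to device memory exported elsewhere. The helper name is hypothetical; the
  // handle is a placeholder supplied by the caller, and name stays null when importing by handle.
  inline VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR exampleImportMemoryNtHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR importInfo;
    importInfo.handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueWin32;
    importInfo.handle     = handle_;
    return importInfo;
  }
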
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
|
|
|
|
#if defined( VK_USE_PLATFORM_WIN32_KHR )
|
|
  struct ImportMemoryWin32HandleInfoNV
  {
    using NativeType = VkImportMemoryWin32HandleInfoNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ = {}, HANDLE handle_ = {}) VULKAN_HPP_NOEXCEPT
    : handleType( handleType_ ), handle( handle_ )
    {}

    VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportMemoryWin32HandleInfoNV( *reinterpret_cast<ImportMemoryWin32HandleInfoNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    ImportMemoryWin32HandleInfoNV & operator=( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImportMemoryWin32HandleInfoNV & operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
    {
      handle = handle_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/


    explicit operator VkImportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportMemoryWin32HandleInfoNV*>( this );
    }

    explicit operator VkImportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportMemoryWin32HandleInfoNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV const &, HANDLE const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, handleType, handle );
    }
#endif


#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( ImportMemoryWin32HandleInfoNV const & ) const = default;
#else
    bool operator==( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( handleType == rhs.handleType )
          && ( handle == rhs.handle );
#endif
    }

    bool operator!=( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType = {};
    HANDLE handle = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV>::value, "ImportMemoryWin32HandleInfoNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eImportMemoryWin32HandleInfoNV>
  {
    using Type = ImportMemoryWin32HandleInfoNV;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

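  // Illustrative sketch only (not generated from the registry): ImportMemoryWin32HandleInfoNV
  // is consumed by chaining it into MemoryAllocateInfo::pNext when allocating device memory
  // from an externally created Win32 handle. `device`, `externalHandle`, `size` and
  // `memoryTypeIndex` are assumed to be provided by the application.
  //
  //   VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV importInfo =
  //     VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV{}
  //       .setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 )
  //       .setHandle( externalHandle );
  //   VULKAN_HPP_NAMESPACE::MemoryAllocateInfo allocateInfo =
  //     VULKAN_HPP_NAMESPACE::MemoryAllocateInfo{}
  //       .setAllocationSize( size )
  //       .setMemoryTypeIndex( memoryTypeIndex )
  //       .setPNext( &importInfo );
  //   VULKAN_HPP_NAMESPACE::DeviceMemory memory = device.allocateMemory( allocateInfo );
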
#if defined( VK_USE_PLATFORM_FUCHSIA )
  struct ImportMemoryZirconHandleInfoFUCHSIA
  {
    using NativeType = VkImportMemoryZirconHandleInfoFUCHSIA;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryZirconHandleInfoFUCHSIA;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImportMemoryZirconHandleInfoFUCHSIA(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, zx_handle_t handle_ = {}) VULKAN_HPP_NOEXCEPT
    : handleType( handleType_ ), handle( handle_ )
    {}

    VULKAN_HPP_CONSTEXPR ImportMemoryZirconHandleInfoFUCHSIA( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImportMemoryZirconHandleInfoFUCHSIA( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportMemoryZirconHandleInfoFUCHSIA( *reinterpret_cast<ImportMemoryZirconHandleInfoFUCHSIA const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    ImportMemoryZirconHandleInfoFUCHSIA & operator=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImportMemoryZirconHandleInfoFUCHSIA & operator=( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & setHandle( zx_handle_t handle_ ) VULKAN_HPP_NOEXCEPT
    {
      handle = handle_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/


    explicit operator VkImportMemoryZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportMemoryZirconHandleInfoFUCHSIA*>( this );
    }

    explicit operator VkImportMemoryZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportMemoryZirconHandleInfoFUCHSIA*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &, zx_handle_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, handleType, handle );
    }
#endif


#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    std::strong_ordering operator<=>( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
      if ( auto cmp = handleType <=> rhs.handleType; cmp != 0 ) return cmp;
      if ( auto cmp = memcmp( &handle, &rhs.handle, sizeof( zx_handle_t ) ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

      return std::strong_ordering::equivalent;
    }
#endif

    bool operator==( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( handleType == rhs.handleType )
          && ( memcmp( &handle, &rhs.handle, sizeof( zx_handle_t ) ) == 0 );
    }

    bool operator!=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryZirconHandleInfoFUCHSIA;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
    zx_handle_t handle = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA ) == sizeof( VkImportMemoryZirconHandleInfoFUCHSIA ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA>::value, "ImportMemoryZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eImportMemoryZirconHandleInfoFUCHSIA>
  {
    using Type = ImportMemoryZirconHandleInfoFUCHSIA;
  };
#endif /*VK_USE_PLATFORM_FUCHSIA*/

  struct ImportSemaphoreFdInfoKHR
  {
    using NativeType = VkImportSemaphoreFdInfoKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreFdInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, int fd_ = {}) VULKAN_HPP_NOEXCEPT
    : semaphore( semaphore_ ), flags( flags_ ), handleType( handleType_ ), fd( fd_ )
    {}

    VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportSemaphoreFdInfoKHR( *reinterpret_cast<ImportSemaphoreFdInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    ImportSemaphoreFdInfoKHR & operator=( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImportSemaphoreFdInfoKHR & operator=( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphore = semaphore_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
    {
      fd = fd_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/


    explicit operator VkImportSemaphoreFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportSemaphoreFdInfoKHR*>( this );
    }

    explicit operator VkImportSemaphoreFdInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportSemaphoreFdInfoKHR*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, VULKAN_HPP_NAMESPACE::SemaphoreImportFlags const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &, int const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, semaphore, flags, handleType, fd );
    }
#endif


#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( ImportSemaphoreFdInfoKHR const & ) const = default;
#else
    bool operator==( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( semaphore == rhs.semaphore )
          && ( flags == rhs.flags )
          && ( handleType == rhs.handleType )
          && ( fd == rhs.fd );
#endif
    }

    bool operator!=( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreFdInfoKHR;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
    VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
    int fd = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR>::value, "ImportSemaphoreFdInfoKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eImportSemaphoreFdInfoKHR>
  {
    using Type = ImportSemaphoreFdInfoKHR;
  };

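  // Illustrative sketch only (not part of the generated interface): importing a POSIX file
  // descriptor payload into an existing semaphore via the fluent setters. `device`,
  // `semaphore` and `fd` are assumed to come from the application.
  //
  //   VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR importInfo =
  //     VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR{}
  //       .setSemaphore( semaphore )
  //       .setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd )
  //       .setFd( fd );
  //   device.importSemaphoreFdKHR( importInfo );
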
#if defined( VK_USE_PLATFORM_WIN32_KHR )
|
|
struct ImportSemaphoreWin32HandleInfoKHR
|
|
{
|
|
using NativeType = VkImportSemaphoreWin32HandleInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreWin32HandleInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: semaphore( semaphore_ ), flags( flags_ ), handleType( handleType_ ), handle( handle_ ), name( name_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImportSemaphoreWin32HandleInfoKHR( *reinterpret_cast<ImportSemaphoreWin32HandleInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImportSemaphoreWin32HandleInfoKHR & operator=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImportSemaphoreWin32HandleInfoKHR & operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
semaphore = semaphore_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handle = handle_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
name = name_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkImportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImportSemaphoreWin32HandleInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, VULKAN_HPP_NAMESPACE::SemaphoreImportFlags const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &, HANDLE const &, LPCWSTR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, semaphore, flags, handleType, handle, name );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ImportSemaphoreWin32HandleInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( semaphore == rhs.semaphore )
|
|
&& ( flags == rhs.flags )
|
|
&& ( handleType == rhs.handleType )
|
|
&& ( handle == rhs.handle )
|
|
&& ( name == rhs.name );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
|
|
VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
|
|
HANDLE handle = {};
|
|
LPCWSTR name = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR>::value, "ImportSemaphoreWin32HandleInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImportSemaphoreWin32HandleInfoKHR>
|
|
{
|
|
using Type = ImportSemaphoreWin32HandleInfoKHR;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
|
|
|
|
#if defined( VK_USE_PLATFORM_FUCHSIA )
|
|
struct ImportSemaphoreZirconHandleInfoFUCHSIA
|
|
{
|
|
using NativeType = VkImportSemaphoreZirconHandleInfoFUCHSIA;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ImportSemaphoreZirconHandleInfoFUCHSIA(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, zx_handle_t zirconHandle_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: semaphore( semaphore_ ), flags( flags_ ), handleType( handleType_ ), zirconHandle( zirconHandle_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ImportSemaphoreZirconHandleInfoFUCHSIA( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImportSemaphoreZirconHandleInfoFUCHSIA( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ImportSemaphoreZirconHandleInfoFUCHSIA( *reinterpret_cast<ImportSemaphoreZirconHandleInfoFUCHSIA const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ImportSemaphoreZirconHandleInfoFUCHSIA & operator=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ImportSemaphoreZirconHandleInfoFUCHSIA & operator=( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
semaphore = semaphore_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setZirconHandle( zx_handle_t zirconHandle_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
zirconHandle = zirconHandle_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkImportSemaphoreZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA*>( this );
|
|
}
|
|
|
|
explicit operator VkImportSemaphoreZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkImportSemaphoreZirconHandleInfoFUCHSIA*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, VULKAN_HPP_NAMESPACE::SemaphoreImportFlags const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &, zx_handle_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, semaphore, flags, handleType, zirconHandle );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
std::strong_ordering operator<=>( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
|
|
if ( auto cmp = semaphore <=> rhs.semaphore; cmp != 0 ) return cmp;
|
|
if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
|
|
if ( auto cmp = handleType <=> rhs.handleType; cmp != 0 ) return cmp;
|
|
if ( auto cmp = memcmp( &zirconHandle, &rhs.zirconHandle, sizeof( zx_handle_t ) ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( semaphore == rhs.semaphore )
|
|
&& ( flags == rhs.flags )
|
|
&& ( handleType == rhs.handleType )
|
|
&& ( memcmp( &zirconHandle, &rhs.zirconHandle, sizeof( zx_handle_t ) ) == 0 );
|
|
}
|
|
|
|
bool operator!=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
|
|
VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
|
|
zx_handle_t zirconHandle = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA ) == sizeof( VkImportSemaphoreZirconHandleInfoFUCHSIA ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA>::value, "ImportSemaphoreZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA>
|
|
{
|
|
using Type = ImportSemaphoreZirconHandleInfoFUCHSIA;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_FUCHSIA*/
|
|
|
|
struct IndirectCommandsLayoutTokenNV
|
|
{
|
|
using NativeType = VkIndirectCommandsLayoutTokenNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutTokenNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV(VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup, uint32_t stream_ = {}, uint32_t offset_ = {}, uint32_t vertexBindingUnit_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ = {}, uint32_t pushconstantOffset_ = {}, uint32_t pushconstantSize_ = {}, VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ = {}, uint32_t indexTypeCount_ = {}, const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ = {}, const uint32_t * pIndexTypeValues_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: tokenType( tokenType_ ), stream( stream_ ), offset( offset_ ), vertexBindingUnit( vertexBindingUnit_ ), vertexDynamicStride( vertexDynamicStride_ ), pushconstantPipelineLayout( pushconstantPipelineLayout_ ), pushconstantShaderStageFlags( pushconstantShaderStageFlags_ ), pushconstantOffset( pushconstantOffset_ ), pushconstantSize( pushconstantSize_ ), indirectStateFlags( indirectStateFlags_ ), indexTypeCount( indexTypeCount_ ), pIndexTypes( pIndexTypes_ ), pIndexTypeValues( pIndexTypeValues_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
IndirectCommandsLayoutTokenNV( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: IndirectCommandsLayoutTokenNV( *reinterpret_cast<IndirectCommandsLayoutTokenNV const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
IndirectCommandsLayoutTokenNV( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_, uint32_t stream_, uint32_t offset_, uint32_t vertexBindingUnit_, VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_, VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_, VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_, uint32_t pushconstantOffset_, uint32_t pushconstantSize_, VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndexType> const & indexTypes_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & indexTypeValues_ = {} )
|
|
: tokenType( tokenType_ ), stream( stream_ ), offset( offset_ ), vertexBindingUnit( vertexBindingUnit_ ), vertexDynamicStride( vertexDynamicStride_ ), pushconstantPipelineLayout( pushconstantPipelineLayout_ ), pushconstantShaderStageFlags( pushconstantShaderStageFlags_ ), pushconstantOffset( pushconstantOffset_ ), pushconstantSize( pushconstantSize_ ), indirectStateFlags( indirectStateFlags_ ), indexTypeCount( static_cast<uint32_t>( indexTypes_.size() ) ), pIndexTypes( indexTypes_.data() ), pIndexTypeValues( indexTypeValues_.data() )
|
|
{
|
|
#ifdef VULKAN_HPP_NO_EXCEPTIONS
|
|
VULKAN_HPP_ASSERT( indexTypes_.size() == indexTypeValues_.size() );
|
|
#else
|
|
if ( indexTypes_.size() != indexTypeValues_.size() )
|
|
{
|
|
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::IndirectCommandsLayoutTokenNV::IndirectCommandsLayoutTokenNV: indexTypes_.size() != indexTypeValues_.size()" );
|
|
}
|
|
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
IndirectCommandsLayoutTokenNV & operator=( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
IndirectCommandsLayoutTokenNV & operator=( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
tokenType = tokenType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setStream( uint32_t stream_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stream = stream_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
offset = offset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setVertexBindingUnit( uint32_t vertexBindingUnit_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
vertexBindingUnit = vertexBindingUnit_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setVertexDynamicStride( VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
vertexDynamicStride = vertexDynamicStride_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pushconstantPipelineLayout = pushconstantPipelineLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantShaderStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pushconstantShaderStageFlags = pushconstantShaderStageFlags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantOffset( uint32_t pushconstantOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pushconstantOffset = pushconstantOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantSize( uint32_t pushconstantSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pushconstantSize = pushconstantSize_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setIndirectStateFlags( VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
indirectStateFlags = indirectStateFlags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setIndexTypeCount( uint32_t indexTypeCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
indexTypeCount = indexTypeCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPIndexTypes( const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pIndexTypes = pIndexTypes_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
IndirectCommandsLayoutTokenNV & setIndexTypes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndexType> const & indexTypes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
indexTypeCount = static_cast<uint32_t>( indexTypes_.size() );
|
|
pIndexTypes = indexTypes_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPIndexTypeValues( const uint32_t * pIndexTypeValues_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pIndexTypeValues = pIndexTypeValues_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
IndirectCommandsLayoutTokenNV & setIndexTypeValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & indexTypeValues_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
indexTypeCount = static_cast<uint32_t>( indexTypeValues_.size() );
|
|
pIndexTypeValues = indexTypeValues_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkIndirectCommandsLayoutTokenNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkIndirectCommandsLayoutTokenNV*>( this );
|
|
}
|
|
|
|
explicit operator VkIndirectCommandsLayoutTokenNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkIndirectCommandsLayoutTokenNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::PipelineLayout const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::IndexType * const &, const uint32_t * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, tokenType, stream, offset, vertexBindingUnit, vertexDynamicStride, pushconstantPipelineLayout, pushconstantShaderStageFlags, pushconstantOffset, pushconstantSize, indirectStateFlags, indexTypeCount, pIndexTypes, pIndexTypeValues );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( IndirectCommandsLayoutTokenNV const & ) const = default;
|
|
#else
|
|
bool operator==( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( tokenType == rhs.tokenType )
|
|
&& ( stream == rhs.stream )
|
|
&& ( offset == rhs.offset )
|
|
&& ( vertexBindingUnit == rhs.vertexBindingUnit )
|
|
&& ( vertexDynamicStride == rhs.vertexDynamicStride )
|
|
&& ( pushconstantPipelineLayout == rhs.pushconstantPipelineLayout )
|
|
&& ( pushconstantShaderStageFlags == rhs.pushconstantShaderStageFlags )
|
|
&& ( pushconstantOffset == rhs.pushconstantOffset )
|
|
&& ( pushconstantSize == rhs.pushconstantSize )
|
|
&& ( indirectStateFlags == rhs.indirectStateFlags )
|
|
&& ( indexTypeCount == rhs.indexTypeCount )
|
|
&& ( pIndexTypes == rhs.pIndexTypes )
|
|
&& ( pIndexTypeValues == rhs.pIndexTypeValues );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutTokenNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup;
|
|
uint32_t stream = {};
|
|
uint32_t offset = {};
|
|
uint32_t vertexBindingUnit = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout = {};
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags = {};
|
|
uint32_t pushconstantOffset = {};
|
|
uint32_t pushconstantSize = {};
|
|
VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags = {};
|
|
uint32_t indexTypeCount = {};
|
|
const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes = {};
|
|
const uint32_t * pIndexTypeValues = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV ) == sizeof( VkIndirectCommandsLayoutTokenNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV>::value, "IndirectCommandsLayoutTokenNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eIndirectCommandsLayoutTokenNV>
|
|
{
|
|
using Type = IndirectCommandsLayoutTokenNV;
|
|
};
|
|
|
|
struct IndirectCommandsLayoutCreateInfoNV
|
|
{
|
|
using NativeType = VkIndirectCommandsLayoutCreateInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutCreateInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, uint32_t tokenCount_ = {}, const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ = {}, uint32_t streamCount_ = {}, const uint32_t * pStreamStrides_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), tokenCount( tokenCount_ ), pTokens( pTokens_ ), streamCount( streamCount_ ), pStreamStrides( pStreamStrides_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
IndirectCommandsLayoutCreateInfoNV( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: IndirectCommandsLayoutCreateInfoNV( *reinterpret_cast<IndirectCommandsLayoutCreateInfoNV const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
IndirectCommandsLayoutCreateInfoNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV> const & tokens_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & streamStrides_ = {} )
|
|
: flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), tokenCount( static_cast<uint32_t>( tokens_.size() ) ), pTokens( tokens_.data() ), streamCount( static_cast<uint32_t>( streamStrides_.size() ) ), pStreamStrides( streamStrides_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
IndirectCommandsLayoutCreateInfoNV & operator=( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
IndirectCommandsLayoutCreateInfoNV & operator=( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineBindPoint = pipelineBindPoint_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
tokenCount = tokenCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pTokens = pTokens_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
IndirectCommandsLayoutCreateInfoNV & setTokens( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV> const & tokens_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
tokenCount = static_cast<uint32_t>( tokens_.size() );
|
|
pTokens = tokens_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
streamCount = streamCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPStreamStrides( const uint32_t * pStreamStrides_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pStreamStrides = pStreamStrides_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
IndirectCommandsLayoutCreateInfoNV & setStreamStrides( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & streamStrides_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
streamCount = static_cast<uint32_t>( streamStrides_.size() );
|
|
pStreamStrides = streamStrides_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkIndirectCommandsLayoutCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkIndirectCommandsLayoutCreateInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkIndirectCommandsLayoutCreateInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV const &, VULKAN_HPP_NAMESPACE::PipelineBindPoint const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * const &, uint32_t const &, const uint32_t * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, pipelineBindPoint, tokenCount, pTokens, streamCount, pStreamStrides );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( IndirectCommandsLayoutCreateInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( pipelineBindPoint == rhs.pipelineBindPoint )
|
|
&& ( tokenCount == rhs.tokenCount )
|
|
&& ( pTokens == rhs.pTokens )
|
|
&& ( streamCount == rhs.streamCount )
|
|
&& ( pStreamStrides == rhs.pStreamStrides );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
|
|
uint32_t tokenCount = {};
|
|
const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens = {};
|
|
uint32_t streamCount = {};
|
|
const uint32_t * pStreamStrides = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV ) == sizeof( VkIndirectCommandsLayoutCreateInfoNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV>::value, "IndirectCommandsLayoutCreateInfoNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eIndirectCommandsLayoutCreateInfoNV>
|
|
{
|
|
using Type = IndirectCommandsLayoutCreateInfoNV;
|
|
};
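
  // Illustrative sketch only (not part of the generated interface): in enhanced mode the
  // ArrayProxy-based setters fill the count/pointer pairs in one call. `tokens` is assumed
  // to be an application-provided std::vector<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV>
  // and `strides` a std::vector<uint32_t> with one entry per token stream.
  //
  //   VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV createInfo =
  //     VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV{}
  //       .setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics )
  //       .setTokens( tokens )
  //       .setStreamStrides( strides );
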
struct InitializePerformanceApiInfoINTEL
|
|
{
|
|
using NativeType = VkInitializePerformanceApiInfoINTEL;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInitializePerformanceApiInfoINTEL;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL(void * pUserData_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: pUserData( pUserData_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
InitializePerformanceApiInfoINTEL( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: InitializePerformanceApiInfoINTEL( *reinterpret_cast<InitializePerformanceApiInfoINTEL const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
InitializePerformanceApiInfoINTEL & operator=( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
InitializePerformanceApiInfoINTEL & operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pUserData = pUserData_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkInitializePerformanceApiInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkInitializePerformanceApiInfoINTEL*>( this );
|
|
}
|
|
|
|
explicit operator VkInitializePerformanceApiInfoINTEL &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkInitializePerformanceApiInfoINTEL*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, void * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pUserData );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( InitializePerformanceApiInfoINTEL const & ) const = default;
|
|
#else
|
|
bool operator==( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( pUserData == rhs.pUserData );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInitializePerformanceApiInfoINTEL;
|
|
const void * pNext = {};
|
|
void * pUserData = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL ) == sizeof( VkInitializePerformanceApiInfoINTEL ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL>::value, "InitializePerformanceApiInfoINTEL is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eInitializePerformanceApiInfoINTEL>
|
|
{
|
|
using Type = InitializePerformanceApiInfoINTEL;
|
|
};
|
|
|
|
struct InputAttachmentAspectReference
|
|
{
|
|
using NativeType = VkInputAttachmentAspectReference;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference(uint32_t subpass_ = {}, uint32_t inputAttachmentIndex_ = {}, VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: subpass( subpass_ ), inputAttachmentIndex( inputAttachmentIndex_ ), aspectMask( aspectMask_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: InputAttachmentAspectReference( *reinterpret_cast<InputAttachmentAspectReference const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
InputAttachmentAspectReference & operator=( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
InputAttachmentAspectReference & operator=( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subpass = subpass_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setInputAttachmentIndex( uint32_t inputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
inputAttachmentIndex = inputAttachmentIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
aspectMask = aspectMask_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkInputAttachmentAspectReference const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkInputAttachmentAspectReference*>( this );
|
|
}
|
|
|
|
explicit operator VkInputAttachmentAspectReference &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkInputAttachmentAspectReference*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageAspectFlags const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( subpass, inputAttachmentIndex, aspectMask );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( InputAttachmentAspectReference const & ) const = default;
|
|
#else
|
|
bool operator==( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( subpass == rhs.subpass )
|
|
&& ( inputAttachmentIndex == rhs.inputAttachmentIndex )
|
|
&& ( aspectMask == rhs.aspectMask );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t subpass = {};
|
|
uint32_t inputAttachmentIndex = {};
|
|
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference ) == sizeof( VkInputAttachmentAspectReference ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference>::value, "InputAttachmentAspectReference is not nothrow_move_constructible!" );
|
|
using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference;
|
|
|
|
struct InstanceCreateInfo
|
|
{
|
|
using NativeType = VkInstanceCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInstanceCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR InstanceCreateInfo(VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ = {}, const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ = {}, uint32_t enabledLayerCount_ = {}, const char * const * ppEnabledLayerNames_ = {}, uint32_t enabledExtensionCount_ = {}, const char * const * ppEnabledExtensionNames_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), pApplicationInfo( pApplicationInfo_ ), enabledLayerCount( enabledLayerCount_ ), ppEnabledLayerNames( ppEnabledLayerNames_ ), enabledExtensionCount( enabledExtensionCount_ ), ppEnabledExtensionNames( ppEnabledExtensionNames_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR InstanceCreateInfo( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: InstanceCreateInfo( *reinterpret_cast<InstanceCreateInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_, const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ = {} )
|
|
: flags( flags_ ), pApplicationInfo( pApplicationInfo_ ), enabledLayerCount( static_cast<uint32_t>( pEnabledLayerNames_.size() ) ), ppEnabledLayerNames( pEnabledLayerNames_.data() ), enabledExtensionCount( static_cast<uint32_t>( pEnabledExtensionNames_.size() ) ), ppEnabledExtensionNames( pEnabledExtensionNames_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
    InstanceCreateInfo & operator=( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    InstanceCreateInfo & operator=( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InstanceCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPApplicationInfo( const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pApplicationInfo = pApplicationInfo_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT
    {
      enabledLayerCount = enabledLayerCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
    {
      ppEnabledLayerNames = ppEnabledLayerNames_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    InstanceCreateInfo & setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
    {
      enabledLayerCount = static_cast<uint32_t>( pEnabledLayerNames_.size() );
      ppEnabledLayerNames = pEnabledLayerNames_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      enabledExtensionCount = enabledExtensionCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
    {
      ppEnabledExtensionNames = ppEnabledExtensionNames_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    InstanceCreateInfo & setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
    {
      enabledExtensionCount = static_cast<uint32_t>( pEnabledExtensionNames_.size() );
      ppEnabledExtensionNames = pEnabledExtensionNames_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkInstanceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkInstanceCreateInfo*>( this );
    }

    explicit operator VkInstanceCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkInstanceCreateInfo*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::InstanceCreateFlags const &, const VULKAN_HPP_NAMESPACE::ApplicationInfo * const &, uint32_t const &, const char * const * const &, uint32_t const &, const char * const * const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, pApplicationInfo, enabledLayerCount, ppEnabledLayerNames, enabledExtensionCount, ppEnabledExtensionNames );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    std::strong_ordering operator<=>( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
      if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
      if ( auto cmp = pApplicationInfo <=> rhs.pApplicationInfo; cmp != 0 ) return cmp;
      if ( auto cmp = enabledLayerCount <=> rhs.enabledLayerCount; cmp != 0 ) return cmp;
      for ( size_t i = 0; i < enabledLayerCount; ++i )
      {
        if ( ppEnabledLayerNames[i] != rhs.ppEnabledLayerNames[i] )
          if ( auto cmp = strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ); cmp != 0 )
            return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater;
      }
      if ( auto cmp = enabledExtensionCount <=> rhs.enabledExtensionCount; cmp != 0 ) return cmp;
      for ( size_t i = 0; i < enabledExtensionCount; ++i )
      {
        if ( ppEnabledExtensionNames[i] != rhs.ppEnabledExtensionNames[i] )
          if ( auto cmp = strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ); cmp != 0 )
            return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater;
      }

      return std::strong_ordering::equivalent;
    }
#endif

    bool operator==( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags )
          && ( pApplicationInfo == rhs.pApplicationInfo )
          && ( enabledLayerCount == rhs.enabledLayerCount )
          && [this, rhs]
             {
               bool equal = true;
               for ( size_t i = 0; equal && ( i < enabledLayerCount ); ++i )
               {
                 equal = ( ( ppEnabledLayerNames[i] == rhs.ppEnabledLayerNames[i] ) || ( strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ) == 0 ) );
               }
               return equal;
             }()
          && ( enabledExtensionCount == rhs.enabledExtensionCount )
          && [this, rhs]
             {
               bool equal = true;
               for ( size_t i = 0; equal && ( i < enabledExtensionCount ); ++i )
               {
                 equal = ( ( ppEnabledExtensionNames[i] == rhs.ppEnabledExtensionNames[i] ) || ( strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ) == 0 ) );
               }
               return equal;
             }();
    }

    bool operator!=( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInstanceCreateInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags = {};
    const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo = {};
    uint32_t enabledLayerCount = {};
    const char * const * ppEnabledLayerNames = {};
    uint32_t enabledExtensionCount = {};
    const char * const * ppEnabledExtensionNames = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::InstanceCreateInfo>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::InstanceCreateInfo>::value, "InstanceCreateInfo is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eInstanceCreateInfo>
  {
    using Type = InstanceCreateInfo;
  };
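  // Illustrative sketch (not part of the generated registry output): filling an InstanceCreateInfo
  // with the fluent setters above, assuming the default "vk" namespace alias and that the
  // ApplicationInfo and the layer/extension name vectors exist in the caller's scope. The
  // enhanced-mode ArrayProxy setters keep the count and pointer members in sync automatically.
  //
  //   vk::ApplicationInfo appInfo( "MyApp", 1, "MyEngine", 1, VK_API_VERSION_1_3 );
  //   std::vector<const char *> layers     = { "VK_LAYER_KHRONOS_validation" };
  //   std::vector<const char *> extensions = { VK_KHR_SURFACE_EXTENSION_NAME };
  //   vk::InstanceCreateInfo createInfo    = vk::InstanceCreateInfo{}
  //                                            .setPApplicationInfo( &appInfo )
  //                                            .setPEnabledLayerNames( layers )
  //                                            .setPEnabledExtensionNames( extensions );
  //   vk::Instance instance = vk::createInstance( createInfo );
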
  struct LayerProperties
  {
    using NativeType = VkLayerProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 LayerProperties(std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const & layerName_ = {}, uint32_t specVersion_ = {}, uint32_t implementationVersion_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & description_ = {}) VULKAN_HPP_NOEXCEPT
      : layerName( layerName_ ), specVersion( specVersion_ ), implementationVersion( implementationVersion_ ), description( description_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 LayerProperties( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : LayerProperties( *reinterpret_cast<LayerProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    LayerProperties & operator=( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    LayerProperties & operator=( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LayerProperties const *>( &rhs );
      return *this;
    }

    explicit operator VkLayerProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkLayerProperties*>( this );
    }

    explicit operator VkLayerProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkLayerProperties*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( layerName, specVersion, implementationVersion, description );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( LayerProperties const & ) const = default;
#else
    bool operator==( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( layerName == rhs.layerName )
          && ( specVersion == rhs.specVersion )
          && ( implementationVersion == rhs.implementationVersion )
          && ( description == rhs.description );
#endif
    }

    bool operator!=( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> layerName = {};
    uint32_t specVersion = {};
    uint32_t implementationVersion = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::LayerProperties>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::LayerProperties>::value, "LayerProperties is not nothrow_move_constructible!" );
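  // Illustrative sketch (not part of the generated registry output): LayerProperties is a pure
  // output structure, typically obtained through vk::enumerateInstanceLayerProperties(); the
  // stream output of layerName assumes the ArrayWrapper1D<char, N> operator<< provided elsewhere
  // in vulkan.hpp:
  //
  //   std::vector<vk::LayerProperties> layers = vk::enumerateInstanceLayerProperties();
  //   for ( vk::LayerProperties const & lp : layers )
  //     std::cout << lp.layerName << " (spec " << lp.specVersion << ")\n";
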
#if defined( VK_USE_PLATFORM_MACOS_MVK )
  struct MacOSSurfaceCreateInfoMVK
  {
    using NativeType = VkMacOSSurfaceCreateInfoMVK;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMacosSurfaceCreateInfoMVK;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK(VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ = {}, const void * pView_ = {}) VULKAN_HPP_NOEXCEPT
      : flags( flags_ ), pView( pView_ )
    {}

    VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
      : MacOSSurfaceCreateInfoMVK( *reinterpret_cast<MacOSSurfaceCreateInfoMVK const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MacOSSurfaceCreateInfoMVK & operator=( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MacOSSurfaceCreateInfoMVK & operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) VULKAN_HPP_NOEXCEPT
    {
      pView = pView_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkMacOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( this );
    }

    explicit operator VkMacOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMacOSSurfaceCreateInfoMVK*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK const &, const void * const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, pView );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( MacOSSurfaceCreateInfoMVK const & ) const = default;
#else
    bool operator==( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags )
          && ( pView == rhs.pView );
#endif
    }

    bool operator!=( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags = {};
    const void * pView = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK>::value, "MacOSSurfaceCreateInfoMVK is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eMacosSurfaceCreateInfoMVK>
  {
    using Type = MacOSSurfaceCreateInfoMVK;
  };
#endif /*VK_USE_PLATFORM_MACOS_MVK*/
  struct MappedMemoryRange
  {
    using NativeType = VkMappedMemoryRange;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMappedMemoryRange;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MappedMemoryRange(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
      : memory( memory_ ), offset( offset_ ), size( size_ )
    {}

    VULKAN_HPP_CONSTEXPR MappedMemoryRange( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MappedMemoryRange( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
      : MappedMemoryRange( *reinterpret_cast<MappedMemoryRange const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MappedMemoryRange & operator=( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MappedMemoryRange & operator=( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MappedMemoryRange const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkMappedMemoryRange const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMappedMemoryRange*>( this );
    }

    explicit operator VkMappedMemoryRange &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMappedMemoryRange*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memory, offset, size );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( MappedMemoryRange const & ) const = default;
#else
    bool operator==( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( memory == rhs.memory )
          && ( offset == rhs.offset )
          && ( size == rhs.size );
#endif
    }

    bool operator!=( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMappedMemoryRange;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MappedMemoryRange>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MappedMemoryRange>::value, "MappedMemoryRange is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eMappedMemoryRange>
  {
    using Type = MappedMemoryRange;
  };
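  // Illustrative sketch (not part of the generated registry output): flushing a host write to a
  // non-coherent allocation, assuming "device" and "memory" handles exist in the caller's scope.
  // Offset and size must respect nonCoherentAtomSize; VK_WHOLE_SIZE sidesteps that here.
  //
  //   vk::MappedMemoryRange range( memory, 0, VK_WHOLE_SIZE );
  //   device.flushMappedMemoryRanges( range );
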
  struct MemoryAllocateFlagsInfo
  {
    using NativeType = VkMemoryAllocateFlagsInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateFlagsInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo(VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ = {}, uint32_t deviceMask_ = {}) VULKAN_HPP_NOEXCEPT
      : flags( flags_ ), deviceMask( deviceMask_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryAllocateFlagsInfo( *reinterpret_cast<MemoryAllocateFlagsInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MemoryAllocateFlagsInfo & operator=( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryAllocateFlagsInfo & operator=( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setFlags( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceMask = deviceMask_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkMemoryAllocateFlagsInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryAllocateFlagsInfo*>( this );
    }

    explicit operator VkMemoryAllocateFlagsInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryAllocateFlagsInfo*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MemoryAllocateFlags const &, uint32_t const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, deviceMask );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( MemoryAllocateFlagsInfo const & ) const = default;
#else
    bool operator==( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags )
          && ( deviceMask == rhs.deviceMask );
#endif
    }

    bool operator!=( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateFlagsInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags = {};
    uint32_t deviceMask = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo ) == sizeof( VkMemoryAllocateFlagsInfo ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo>::value, "MemoryAllocateFlagsInfo is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryAllocateFlagsInfo>
  {
    using Type = MemoryAllocateFlagsInfo;
  };
  using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo;
  struct MemoryAllocateInfo
  {
    using NativeType = VkMemoryAllocateInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryAllocateInfo(VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, uint32_t memoryTypeIndex_ = {}) VULKAN_HPP_NOEXCEPT
      : allocationSize( allocationSize_ ), memoryTypeIndex( memoryTypeIndex_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryAllocateInfo( *reinterpret_cast<MemoryAllocateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MemoryAllocateInfo & operator=( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryAllocateInfo & operator=( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setAllocationSize( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ ) VULKAN_HPP_NOEXCEPT
    {
      allocationSize = allocationSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setMemoryTypeIndex( uint32_t memoryTypeIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryTypeIndex = memoryTypeIndex_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryAllocateInfo*>( this );
    }

    explicit operator VkMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryAllocateInfo*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, allocationSize, memoryTypeIndex );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( MemoryAllocateInfo const & ) const = default;
#else
    bool operator==( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( allocationSize == rhs.allocationSize )
          && ( memoryTypeIndex == rhs.memoryTypeIndex );
#endif
    }

    bool operator!=( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {};
    uint32_t memoryTypeIndex = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo>::value, "MemoryAllocateInfo is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryAllocateInfo>
  {
    using Type = MemoryAllocateInfo;
  };
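  // Illustrative sketch (not part of the generated registry output): allocating device memory with
  // a MemoryAllocateFlagsInfo chained through pNext to request a device address. The memory
  // requirements, memory type index and "device" handle are assumed to come from the caller.
  //
  //   vk::StructureChain<vk::MemoryAllocateInfo, vk::MemoryAllocateFlagsInfo> chain(
  //     vk::MemoryAllocateInfo( memRequirements.size, memoryTypeIndex ),
  //     vk::MemoryAllocateFlagsInfo( vk::MemoryAllocateFlagBits::eDeviceAddress ) );
  //   vk::DeviceMemory memory = device.allocateMemory( chain.get<vk::MemoryAllocateInfo>() );
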
  struct MemoryBarrier
  {
    using NativeType = VkMemoryBarrier;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}) VULKAN_HPP_NOEXCEPT
      : srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryBarrier( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryBarrier( *reinterpret_cast<MemoryBarrier const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MemoryBarrier & operator=( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryBarrier & operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrier const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAccessMask = srcAccessMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAccessMask = dstAccessMask_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryBarrier*>( this );
    }

    explicit operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryBarrier*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, VULKAN_HPP_NAMESPACE::AccessFlags const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcAccessMask, dstAccessMask );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( MemoryBarrier const & ) const = default;
#else
    bool operator==( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( srcAccessMask == rhs.srcAccessMask )
          && ( dstAccessMask == rhs.dstAccessMask );
#endif
    }

    bool operator!=( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryBarrier>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryBarrier>::value, "MemoryBarrier is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryBarrier>
  {
    using Type = MemoryBarrier;
  };
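  // Illustrative sketch (not part of the generated registry output): a global MemoryBarrier used
  // with CommandBuffer::pipelineBarrier, assuming a command buffer "cmd" that is currently recording:
  //
  //   vk::MemoryBarrier barrier( vk::AccessFlagBits::eTransferWrite, vk::AccessFlagBits::eShaderRead );
  //   cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer, vk::PipelineStageFlagBits::eFragmentShader,
  //                        {}, barrier, nullptr, nullptr );
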
  struct MemoryDedicatedAllocateInfo
  {
    using NativeType = VkMemoryDedicatedAllocateInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedAllocateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}) VULKAN_HPP_NOEXCEPT
      : image( image_ ), buffer( buffer_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryDedicatedAllocateInfo( *reinterpret_cast<MemoryDedicatedAllocateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MemoryDedicatedAllocateInfo & operator=( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryDedicatedAllocateInfo & operator=( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
    {
      image = image_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkMemoryDedicatedAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryDedicatedAllocateInfo*>( this );
    }

    explicit operator VkMemoryDedicatedAllocateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryDedicatedAllocateInfo*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::Buffer const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, image, buffer );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( MemoryDedicatedAllocateInfo const & ) const = default;
#else
    bool operator==( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( image == rhs.image )
          && ( buffer == rhs.buffer );
#endif
    }

    bool operator!=( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedAllocateInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::Image image = {};
    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo ) == sizeof( VkMemoryDedicatedAllocateInfo ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo>::value, "MemoryDedicatedAllocateInfo is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryDedicatedAllocateInfo>
  {
    using Type = MemoryDedicatedAllocateInfo;
  };
  using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo;
  struct MemoryDedicatedRequirements
  {
    using NativeType = VkMemoryDedicatedRequirements;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedRequirements;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements(VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = {}) VULKAN_HPP_NOEXCEPT
      : prefersDedicatedAllocation( prefersDedicatedAllocation_ ), requiresDedicatedAllocation( requiresDedicatedAllocation_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryDedicatedRequirements( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryDedicatedRequirements( *reinterpret_cast<MemoryDedicatedRequirements const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MemoryDedicatedRequirements & operator=( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryDedicatedRequirements & operator=( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements const *>( &rhs );
      return *this;
    }

    explicit operator VkMemoryDedicatedRequirements const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryDedicatedRequirements*>( this );
    }

    explicit operator VkMemoryDedicatedRequirements &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryDedicatedRequirements*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, prefersDedicatedAllocation, requiresDedicatedAllocation );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( MemoryDedicatedRequirements const & ) const = default;
#else
    bool operator==( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation )
          && ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation );
#endif
    }

    bool operator!=( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedRequirements;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation = {};
    VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements>::value, "MemoryDedicatedRequirements is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryDedicatedRequirements>
  {
    using Type = MemoryDedicatedRequirements;
  };
  using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements;
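  // Illustrative sketch (not part of the generated registry output): MemoryDedicatedRequirements is
  // an output structure returned through the pNext chain of MemoryRequirements2. The templated
  // StructureChain getter below assumes a Vulkan 1.1+ "device" and an existing "buffer" handle:
  //
  //   auto chain = device.getBufferMemoryRequirements2<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>(
  //     vk::BufferMemoryRequirementsInfo2( buffer ) );
  //   bool dedicated = chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation;
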
  struct MemoryFdPropertiesKHR
  {
    using NativeType = VkMemoryFdPropertiesKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryFdPropertiesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR(uint32_t memoryTypeBits_ = {}) VULKAN_HPP_NOEXCEPT
      : memoryTypeBits( memoryTypeBits_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryFdPropertiesKHR( *reinterpret_cast<MemoryFdPropertiesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MemoryFdPropertiesKHR & operator=( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryFdPropertiesKHR & operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR const *>( &rhs );
      return *this;
    }

    explicit operator VkMemoryFdPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryFdPropertiesKHR*>( this );
    }

    explicit operator VkMemoryFdPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryFdPropertiesKHR*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memoryTypeBits );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( MemoryFdPropertiesKHR const & ) const = default;
#else
    bool operator==( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( memoryTypeBits == rhs.memoryTypeBits );
#endif
    }

    bool operator!=( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryFdPropertiesKHR;
    void * pNext = {};
    uint32_t memoryTypeBits = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::value, "MemoryFdPropertiesKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryFdPropertiesKHR>
  {
    using Type = MemoryFdPropertiesKHR;
  };
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
|
|
struct MemoryGetAndroidHardwareBufferInfoANDROID
|
|
{
|
|
using NativeType = VkMemoryGetAndroidHardwareBufferInfoANDROID;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: memory( memory_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: MemoryGetAndroidHardwareBufferInfoANDROID( *reinterpret_cast<MemoryGetAndroidHardwareBufferInfoANDROID const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
MemoryGetAndroidHardwareBufferInfoANDROID & operator=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
MemoryGetAndroidHardwareBufferInfoANDROID & operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memory = memory_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkMemoryGetAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID*>( this );
|
|
}
|
|
|
|
explicit operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkMemoryGetAndroidHardwareBufferInfoANDROID*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, memory );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( MemoryGetAndroidHardwareBufferInfoANDROID const & ) const = default;
|
|
#else
|
|
bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( memory == rhs.memory );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID ) == sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID>::value, "MemoryGetAndroidHardwareBufferInfoANDROID is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID>
|
|
{
|
|
using Type = MemoryGetAndroidHardwareBufferInfoANDROID;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
|
|
|
|
struct MemoryGetFdInfoKHR
|
|
{
|
|
using NativeType = VkMemoryGetFdInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetFdInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
|
|
: memory( memory_ ), handleType( handleType_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: MemoryGetFdInfoKHR( *reinterpret_cast<MemoryGetFdInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
MemoryGetFdInfoKHR & operator=( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
MemoryGetFdInfoKHR & operator=( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memory = memory_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkMemoryGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkMemoryGetFdInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkMemoryGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkMemoryGetFdInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, memory, handleType );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( MemoryGetFdInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( memory == rhs.memory )
|
|
&& ( handleType == rhs.handleType );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetFdInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR>::value, "MemoryGetFdInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eMemoryGetFdInfoKHR>
|
|
{
|
|
using Type = MemoryGetFdInfoKHR;
|
|
};
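  // Illustrative sketch (not part of the generated registry output): exporting an opaque POSIX file
  // descriptor for an allocation created with VK_KHR_external_memory_fd. It assumes the extension is
  // enabled and its entry point is available through the (defaulted or explicitly passed) dispatcher:
  //
  //   vk::MemoryGetFdInfoKHR getFdInfo( memory, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
  //   int fd = device.getMemoryFdKHR( getFdInfo );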
|
|
|
|
struct MemoryGetRemoteAddressInfoNV
|
|
{
|
|
using NativeType = VkMemoryGetRemoteAddressInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetRemoteAddressInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR MemoryGetRemoteAddressInfoNV(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
|
|
: memory( memory_ ), handleType( handleType_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR MemoryGetRemoteAddressInfoNV( MemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
MemoryGetRemoteAddressInfoNV( VkMemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: MemoryGetRemoteAddressInfoNV( *reinterpret_cast<MemoryGetRemoteAddressInfoNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
MemoryGetRemoteAddressInfoNV & operator=( MemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
MemoryGetRemoteAddressInfoNV & operator=( VkMemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memory = memory_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkMemoryGetRemoteAddressInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkMemoryGetRemoteAddressInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkMemoryGetRemoteAddressInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, memory, handleType );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( MemoryGetRemoteAddressInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( MemoryGetRemoteAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( memory == rhs.memory )
|
|
&& ( handleType == rhs.handleType );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( MemoryGetRemoteAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetRemoteAddressInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV ) == sizeof( VkMemoryGetRemoteAddressInfoNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV>::value, "MemoryGetRemoteAddressInfoNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eMemoryGetRemoteAddressInfoNV>
|
|
{
|
|
using Type = MemoryGetRemoteAddressInfoNV;
|
|
};
|
|
|
|
#if defined( VK_USE_PLATFORM_WIN32_KHR )
|
|
struct MemoryGetWin32HandleInfoKHR
|
|
{
|
|
using NativeType = VkMemoryGetWin32HandleInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetWin32HandleInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
|
|
: memory( memory_ ), handleType( handleType_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: MemoryGetWin32HandleInfoKHR( *reinterpret_cast<MemoryGetWin32HandleInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
MemoryGetWin32HandleInfoKHR & operator=( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
MemoryGetWin32HandleInfoKHR & operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memory = memory_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkMemoryGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkMemoryGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkMemoryGetWin32HandleInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, memory, handleType );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( MemoryGetWin32HandleInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( memory == rhs.memory )
|
|
&& ( handleType == rhs.handleType );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR>::value, "MemoryGetWin32HandleInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eMemoryGetWin32HandleInfoKHR>
|
|
{
|
|
using Type = MemoryGetWin32HandleInfoKHR;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
|
|
|
|
#if defined( VK_USE_PLATFORM_FUCHSIA )
  struct MemoryGetZirconHandleInfoFUCHSIA
  {
    using NativeType = VkMemoryGetZirconHandleInfoFUCHSIA;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryGetZirconHandleInfoFUCHSIA( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
      : memory( memory_ ), handleType( handleType_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryGetZirconHandleInfoFUCHSIA( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryGetZirconHandleInfoFUCHSIA( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryGetZirconHandleInfoFUCHSIA( *reinterpret_cast<MemoryGetZirconHandleInfoFUCHSIA const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MemoryGetZirconHandleInfoFUCHSIA & operator=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryGetZirconHandleInfoFUCHSIA & operator=( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkMemoryGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( this ); }

    explicit operator VkMemoryGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkMemoryGetZirconHandleInfoFUCHSIA *>( this ); }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memory, handleType );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryGetZirconHandleInfoFUCHSIA const & ) const = default;
#else
    bool operator==( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType );
#endif
    }

    bool operator!=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA ) == sizeof( VkMemoryGetZirconHandleInfoFUCHSIA ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA>::value, "MemoryGetZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryGetZirconHandleInfoFUCHSIA>
  {
    using Type = MemoryGetZirconHandleInfoFUCHSIA;
  };
#endif /*VK_USE_PLATFORM_FUCHSIA*/
  struct MemoryHeap
  {
    using NativeType = VkMemoryHeap;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryHeap( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
      : size( size_ ), flags( flags_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryHeap( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryHeap( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryHeap( *reinterpret_cast<MemoryHeap const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MemoryHeap & operator=( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryHeap & operator=( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHeap const *>( &rhs );
      return *this;
    }

    explicit operator VkMemoryHeap const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkMemoryHeap *>( this ); }

    explicit operator VkMemoryHeap &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkMemoryHeap *>( this ); }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::MemoryHeapFlags const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( size, flags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryHeap const & ) const = default;
#else
    bool operator==( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( size == rhs.size ) && ( flags == rhs.flags );
#endif
    }

    bool operator!=( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
    VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryHeap>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryHeap>::value, "MemoryHeap is not nothrow_move_constructible!" );
  struct MemoryHostPointerPropertiesEXT
  {
    using NativeType = VkMemoryHostPointerPropertiesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryHostPointerPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
      : memoryTypeBits( memoryTypeBits_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryHostPointerPropertiesEXT( *reinterpret_cast<MemoryHostPointerPropertiesEXT const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MemoryHostPointerPropertiesEXT & operator=( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryHostPointerPropertiesEXT & operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT const *>( &rhs );
      return *this;
    }

    explicit operator VkMemoryHostPointerPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkMemoryHostPointerPropertiesEXT *>( this ); }

    explicit operator VkMemoryHostPointerPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( this ); }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memoryTypeBits );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryHostPointerPropertiesEXT const & ) const = default;
#else
    bool operator==( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits );
#endif
    }

    bool operator!=( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT;
    void * pNext = {};
    uint32_t memoryTypeBits = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT ) == sizeof( VkMemoryHostPointerPropertiesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::value, "MemoryHostPointerPropertiesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryHostPointerPropertiesEXT>
  {
    using Type = MemoryHostPointerPropertiesEXT;
  };
  struct MemoryOpaqueCaptureAddressAllocateInfo
  {
    using NativeType = VkMemoryOpaqueCaptureAddressAllocateInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( uint64_t opaqueCaptureAddress_ = {} ) VULKAN_HPP_NOEXCEPT
      : opaqueCaptureAddress( opaqueCaptureAddress_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryOpaqueCaptureAddressAllocateInfo( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryOpaqueCaptureAddressAllocateInfo( *reinterpret_cast<MemoryOpaqueCaptureAddressAllocateInfo const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MemoryOpaqueCaptureAddressAllocateInfo & operator=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryOpaqueCaptureAddressAllocateInfo & operator=( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT { opaqueCaptureAddress = opaqueCaptureAddress_; return *this; }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkMemoryOpaqueCaptureAddressAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkMemoryOpaqueCaptureAddressAllocateInfo *>( this ); }

    explicit operator VkMemoryOpaqueCaptureAddressAllocateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkMemoryOpaqueCaptureAddressAllocateInfo *>( this ); }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, opaqueCaptureAddress );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryOpaqueCaptureAddressAllocateInfo const & ) const = default;
#else
    bool operator==( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
#endif
    }

    bool operator!=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;
    const void * pNext = {};
    uint64_t opaqueCaptureAddress = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo ) == sizeof( VkMemoryOpaqueCaptureAddressAllocateInfo ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo>::value, "MemoryOpaqueCaptureAddressAllocateInfo is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryOpaqueCaptureAddressAllocateInfo>
  {
    using Type = MemoryOpaqueCaptureAddressAllocateInfo;
  };
  using MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo;
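  // Usage sketch (illustrative, hedged): MemoryOpaqueCaptureAddressAllocateInfo replays an address previously queried
  // with vkGetDeviceMemoryOpaqueCaptureAddress and is chained into vk::MemoryAllocateInfo, typically together with
  // vk::MemoryAllocateFlagBits::eDeviceAddressCaptureReplay. `capturedAddress`, `allocationSize` and `memoryTypeIndex`
  // are placeholders for values recorded by the application during the capture run:
  //   vk::MemoryOpaqueCaptureAddressAllocateInfo captureInfo( capturedAddress );
  //   vk::MemoryAllocateInfo allocInfo( allocationSize, memoryTypeIndex );
  //   allocInfo.pNext = &captureInfo;
  //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );  // exceptions-enabled configuration assumed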
  struct MemoryPriorityAllocateInfoEXT
  {
    using NativeType = VkMemoryPriorityAllocateInfoEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryPriorityAllocateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( float priority_ = {} ) VULKAN_HPP_NOEXCEPT
      : priority( priority_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryPriorityAllocateInfoEXT( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryPriorityAllocateInfoEXT( *reinterpret_cast<MemoryPriorityAllocateInfoEXT const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MemoryPriorityAllocateInfoEXT & operator=( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryPriorityAllocateInfoEXT & operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT & setPriority( float priority_ ) VULKAN_HPP_NOEXCEPT { priority = priority_; return *this; }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkMemoryPriorityAllocateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkMemoryPriorityAllocateInfoEXT *>( this ); }

    explicit operator VkMemoryPriorityAllocateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkMemoryPriorityAllocateInfoEXT *>( this ); }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, float const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, priority );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryPriorityAllocateInfoEXT const & ) const = default;
#else
    bool operator==( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( priority == rhs.priority );
#endif
    }

    bool operator!=( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryPriorityAllocateInfoEXT;
    const void * pNext = {};
    float priority = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT ) == sizeof( VkMemoryPriorityAllocateInfoEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT>::value, "MemoryPriorityAllocateInfoEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryPriorityAllocateInfoEXT>
  {
    using Type = MemoryPriorityAllocateInfoEXT;
  };
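  // Usage sketch (illustrative): with VK_EXT_memory_priority enabled, a priority hint in [0.0, 1.0] (0.5 is the
  // implementation default) can be attached to an allocation through the pNext chain; `allocationSize` and
  // `memoryTypeIndex` are placeholders:
  //   vk::MemoryPriorityAllocateInfoEXT priorityInfo( 1.0f );
  //   vk::MemoryAllocateInfo allocInfo( allocationSize, memoryTypeIndex );
  //   allocInfo.pNext = &priorityInfo;
  //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );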
  struct MemoryRequirements
  {
    using NativeType = VkMemoryRequirements;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryRequirements( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize alignment_ = {}, uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
      : size( size_ ), alignment( alignment_ ), memoryTypeBits( memoryTypeBits_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryRequirements( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryRequirements( *reinterpret_cast<MemoryRequirements const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MemoryRequirements & operator=( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryRequirements & operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements const *>( &rhs );
      return *this;
    }

    explicit operator VkMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkMemoryRequirements *>( this ); }

    explicit operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkMemoryRequirements *>( this ); }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( size, alignment, memoryTypeBits );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryRequirements const & ) const = default;
#else
    bool operator==( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( size == rhs.size ) && ( alignment == rhs.alignment ) && ( memoryTypeBits == rhs.memoryTypeBits );
#endif
    }

    bool operator!=( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
    VULKAN_HPP_NAMESPACE::DeviceSize alignment = {};
    uint32_t memoryTypeBits = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryRequirements>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryRequirements>::value, "MemoryRequirements is not nothrow_move_constructible!" );
  struct MemoryRequirements2
  {
    using NativeType = VkMemoryRequirements2;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryRequirements2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryRequirements2( VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT
      : memoryRequirements( memoryRequirements_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryRequirements2( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryRequirements2( *reinterpret_cast<MemoryRequirements2 const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MemoryRequirements2 & operator=( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryRequirements2 & operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements2 const *>( &rhs );
      return *this;
    }

    explicit operator VkMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkMemoryRequirements2 *>( this ); }

    explicit operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkMemoryRequirements2 *>( this ); }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::MemoryRequirements const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memoryRequirements );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryRequirements2 const & ) const = default;
#else
    bool operator==( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryRequirements == rhs.memoryRequirements );
#endif
    }

    bool operator!=( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryRequirements2;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryRequirements2>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryRequirements2>::value, "MemoryRequirements2 is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryRequirements2>
  {
    using Type = MemoryRequirements2;
  };
  using MemoryRequirements2KHR = MemoryRequirements2;
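  // Usage sketch (illustrative, assuming the enhanced-mode vulkan.hpp wrappers): MemoryRequirements2 is the output of
  // the *MemoryRequirements2 queries and can carry extension structs such as vk::MemoryDedicatedRequirements in pNext;
  // `device` and `buffer` are placeholders:
  //   vk::MemoryRequirements2 req2 = device.getBufferMemoryRequirements2( vk::BufferMemoryRequirementsInfo2( buffer ) );
  //   vk::DeviceSize neededSize = req2.memoryRequirements.size;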
  struct MemoryType
  {
    using NativeType = VkMemoryType;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryType( VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {}, uint32_t heapIndex_ = {} ) VULKAN_HPP_NOEXCEPT
      : propertyFlags( propertyFlags_ ), heapIndex( heapIndex_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryType( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryType( *reinterpret_cast<MemoryType const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MemoryType & operator=( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryType & operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryType const *>( &rhs );
      return *this;
    }

    explicit operator VkMemoryType const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkMemoryType *>( this ); }

    explicit operator VkMemoryType &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkMemoryType *>( this ); }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::MemoryPropertyFlags const &, uint32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( propertyFlags, heapIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryType const & ) const = default;
#else
    bool operator==( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( propertyFlags == rhs.propertyFlags ) && ( heapIndex == rhs.heapIndex );
#endif
    }

    bool operator!=( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags = {};
    uint32_t heapIndex = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryType>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryType>::value, "MemoryType is not nothrow_move_constructible!" );
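  // Usage sketch (illustrative): the classic memory-type selection loop matches a MemoryRequirements::memoryTypeBits
  // mask against the MemoryType::propertyFlags reported in vk::PhysicalDeviceMemoryProperties; `physicalDevice` and
  // `memoryTypeBits` are placeholders:
  //   vk::PhysicalDeviceMemoryProperties props = physicalDevice.getMemoryProperties();
  //   uint32_t chosen = ~0u;
  //   for ( uint32_t i = 0; i < props.memoryTypeCount; ++i )
  //     if ( ( memoryTypeBits & ( 1u << i ) ) && ( props.memoryTypes[i].propertyFlags & vk::MemoryPropertyFlagBits::eHostVisible ) )
  //     {
  //       chosen = i;
  //       break;
  //     }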
#if defined( VK_USE_PLATFORM_WIN32_KHR )
  struct MemoryWin32HandlePropertiesKHR
  {
    using NativeType = VkMemoryWin32HandlePropertiesKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryWin32HandlePropertiesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
      : memoryTypeBits( memoryTypeBits_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryWin32HandlePropertiesKHR( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryWin32HandlePropertiesKHR( *reinterpret_cast<MemoryWin32HandlePropertiesKHR const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MemoryWin32HandlePropertiesKHR & operator=( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryWin32HandlePropertiesKHR & operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR const *>( &rhs );
      return *this;
    }

    explicit operator VkMemoryWin32HandlePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkMemoryWin32HandlePropertiesKHR *>( this ); }

    explicit operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( this ); }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memoryTypeBits );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryWin32HandlePropertiesKHR const & ) const = default;
#else
    bool operator==( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits );
#endif
    }

    bool operator!=( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR;
    void * pNext = {};
    uint32_t memoryTypeBits = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::value, "MemoryWin32HandlePropertiesKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryWin32HandlePropertiesKHR>
  {
    using Type = MemoryWin32HandlePropertiesKHR;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
  struct MemoryZirconHandlePropertiesFUCHSIA
  {
    using NativeType = VkMemoryZirconHandlePropertiesFUCHSIA;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryZirconHandlePropertiesFUCHSIA;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
      : memoryTypeBits( memoryTypeBits_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryZirconHandlePropertiesFUCHSIA( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryZirconHandlePropertiesFUCHSIA( *reinterpret_cast<MemoryZirconHandlePropertiesFUCHSIA const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MemoryZirconHandlePropertiesFUCHSIA & operator=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryZirconHandlePropertiesFUCHSIA & operator=( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA const *>( &rhs );
      return *this;
    }

    explicit operator VkMemoryZirconHandlePropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkMemoryZirconHandlePropertiesFUCHSIA *>( this ); }

    explicit operator VkMemoryZirconHandlePropertiesFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( this ); }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memoryTypeBits );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryZirconHandlePropertiesFUCHSIA const & ) const = default;
#else
    bool operator==( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits );
#endif
    }

    bool operator!=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryZirconHandlePropertiesFUCHSIA;
    void * pNext = {};
    uint32_t memoryTypeBits = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA ) == sizeof( VkMemoryZirconHandlePropertiesFUCHSIA ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::value, "MemoryZirconHandlePropertiesFUCHSIA is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryZirconHandlePropertiesFUCHSIA>
  {
    using Type = MemoryZirconHandlePropertiesFUCHSIA;
  };
#endif /*VK_USE_PLATFORM_FUCHSIA*/
#if defined( VK_USE_PLATFORM_METAL_EXT )
  struct MetalSurfaceCreateInfoEXT
  {
    using NativeType = VkMetalSurfaceCreateInfoEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMetalSurfaceCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = {}, const CAMetalLayer * pLayer_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ ), pLayer( pLayer_ )
    {}

    VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MetalSurfaceCreateInfoEXT( *reinterpret_cast<MetalSurfaceCreateInfoEXT const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MetalSurfaceCreateInfoEXT & operator=( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MetalSurfaceCreateInfoEXT & operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setPLayer( const CAMetalLayer * pLayer_ ) VULKAN_HPP_NOEXCEPT { pLayer = pLayer_; return *this; }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkMetalSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( this ); }

    explicit operator VkMetalSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkMetalSurfaceCreateInfoEXT *>( this ); }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT const &, const CAMetalLayer * const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, pLayer );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MetalSurfaceCreateInfoEXT const & ) const = default;
#else
    bool operator==( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pLayer == rhs.pLayer );
#endif
    }

    bool operator!=( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMetalSurfaceCreateInfoEXT;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags = {};
    const CAMetalLayer * pLayer = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT ) == sizeof( VkMetalSurfaceCreateInfoEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT>::value, "MetalSurfaceCreateInfoEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eMetalSurfaceCreateInfoEXT>
  {
    using Type = MetalSurfaceCreateInfoEXT;
  };
#endif /*VK_USE_PLATFORM_METAL_EXT*/
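  // Usage sketch (illustrative): MetalSurfaceCreateInfoEXT wraps an existing CAMetalLayer; `metalLayer` and `instance`
  // below are placeholders supplied by the windowing code:
  //   vk::MetalSurfaceCreateInfoEXT createInfo( {}, metalLayer );
  //   vk::SurfaceKHR surface = instance.createMetalSurfaceEXT( createInfo );  // exceptions-enabled configuration assumed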
  struct MultiDrawIndexedInfoEXT
  {
    using NativeType = VkMultiDrawIndexedInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MultiDrawIndexedInfoEXT( uint32_t firstIndex_ = {}, uint32_t indexCount_ = {}, int32_t vertexOffset_ = {} ) VULKAN_HPP_NOEXCEPT
      : firstIndex( firstIndex_ ), indexCount( indexCount_ ), vertexOffset( vertexOffset_ )
    {}

    VULKAN_HPP_CONSTEXPR MultiDrawIndexedInfoEXT( MultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MultiDrawIndexedInfoEXT( VkMultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MultiDrawIndexedInfoEXT( *reinterpret_cast<MultiDrawIndexedInfoEXT const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MultiDrawIndexedInfoEXT & operator=( MultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MultiDrawIndexedInfoEXT & operator=( VkMultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT { firstIndex = firstIndex_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT { indexCount = indexCount_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT { vertexOffset = vertexOffset_; return *this; }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkMultiDrawIndexedInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( this ); }

    explicit operator VkMultiDrawIndexedInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkMultiDrawIndexedInfoEXT *>( this ); }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<uint32_t const &, uint32_t const &, int32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( firstIndex, indexCount, vertexOffset );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MultiDrawIndexedInfoEXT const & ) const = default;
#else
    bool operator==( MultiDrawIndexedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( firstIndex == rhs.firstIndex ) && ( indexCount == rhs.indexCount ) && ( vertexOffset == rhs.vertexOffset );
#endif
    }

    bool operator!=( MultiDrawIndexedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
#endif

  public:
    uint32_t firstIndex = {};
    uint32_t indexCount = {};
    int32_t vertexOffset = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT ) == sizeof( VkMultiDrawIndexedInfoEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT>::value, "MultiDrawIndexedInfoEXT is not nothrow_move_constructible!" );
  struct MultiDrawInfoEXT
  {
    using NativeType = VkMultiDrawInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MultiDrawInfoEXT( uint32_t firstVertex_ = {}, uint32_t vertexCount_ = {} ) VULKAN_HPP_NOEXCEPT
      : firstVertex( firstVertex_ ), vertexCount( vertexCount_ )
    {}

    VULKAN_HPP_CONSTEXPR MultiDrawInfoEXT( MultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MultiDrawInfoEXT( VkMultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MultiDrawInfoEXT( *reinterpret_cast<MultiDrawInfoEXT const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MultiDrawInfoEXT & operator=( MultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MultiDrawInfoEXT & operator=( VkMultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 MultiDrawInfoEXT & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT { firstVertex = firstVertex_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 MultiDrawInfoEXT & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT { vertexCount = vertexCount_; return *this; }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkMultiDrawInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkMultiDrawInfoEXT *>( this ); }

    explicit operator VkMultiDrawInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkMultiDrawInfoEXT *>( this ); }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<uint32_t const &, uint32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( firstVertex, vertexCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MultiDrawInfoEXT const & ) const = default;
#else
    bool operator==( MultiDrawInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( firstVertex == rhs.firstVertex ) && ( vertexCount == rhs.vertexCount );
#endif
    }

    bool operator!=( MultiDrawInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
#endif

  public:
    uint32_t firstVertex = {};
    uint32_t vertexCount = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT ) == sizeof( VkMultiDrawInfoEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT>::value, "MultiDrawInfoEXT is not nothrow_move_constructible!" );
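  // Usage sketch (illustrative; the exact enhanced-mode signature of vk::CommandBuffer::drawMultiEXT is an assumption):
  // VK_EXT_multi_draw consumes arrays of MultiDrawInfoEXT / MultiDrawIndexedInfoEXT instead of one draw call per range:
  //   std::array<vk::MultiDrawInfoEXT, 2> draws = { vk::MultiDrawInfoEXT( 0, 3 ), vk::MultiDrawInfoEXT( 3, 3 ) };
  //   commandBuffer.drawMultiEXT( draws, 1 /*instanceCount*/, 0 /*firstInstance*/, sizeof( vk::MultiDrawInfoEXT ) /*stride*/ );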
  struct MultisamplePropertiesEXT
  {
    using NativeType = VkMultisamplePropertiesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisamplePropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {} ) VULKAN_HPP_NOEXCEPT
      : maxSampleLocationGridSize( maxSampleLocationGridSize_ )
    {}

    VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MultisamplePropertiesEXT( *reinterpret_cast<MultisamplePropertiesEXT const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MultisamplePropertiesEXT & operator=( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MultisamplePropertiesEXT & operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT const *>( &rhs );
      return *this;
    }

    explicit operator VkMultisamplePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkMultisamplePropertiesEXT *>( this ); }

    explicit operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkMultisamplePropertiesEXT *>( this ); }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxSampleLocationGridSize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MultisamplePropertiesEXT const & ) const = default;
#else
    bool operator==( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize );
#endif
    }

    bool operator!=( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisamplePropertiesEXT;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT>::value, "MultisamplePropertiesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eMultisamplePropertiesEXT>
  {
    using Type = MultisamplePropertiesEXT;
  };
  struct MultiviewPerViewAttributesInfoNVX
  {
    using NativeType = VkMultiviewPerViewAttributesInfoNVX;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultiviewPerViewAttributesInfoNVX;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MultiviewPerViewAttributesInfoNVX( VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly_ = {} ) VULKAN_HPP_NOEXCEPT
      : perViewAttributes( perViewAttributes_ ), perViewAttributesPositionXOnly( perViewAttributesPositionXOnly_ )
    {}

    VULKAN_HPP_CONSTEXPR MultiviewPerViewAttributesInfoNVX( MultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MultiviewPerViewAttributesInfoNVX( VkMultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT : MultiviewPerViewAttributesInfoNVX( *reinterpret_cast<MultiviewPerViewAttributesInfoNVX const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MultiviewPerViewAttributesInfoNVX & operator=( MultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MultiviewPerViewAttributesInfoNVX & operator=( VkMultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPerViewAttributes( VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes_ ) VULKAN_HPP_NOEXCEPT { perViewAttributes = perViewAttributes_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPerViewAttributesPositionXOnly( VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly_ ) VULKAN_HPP_NOEXCEPT { perViewAttributesPositionXOnly = perViewAttributesPositionXOnly_; return *this; }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkMultiviewPerViewAttributesInfoNVX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkMultiviewPerViewAttributesInfoNVX *>( this ); }

    explicit operator VkMultiviewPerViewAttributesInfoNVX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkMultiviewPerViewAttributesInfoNVX *>( this ); }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, perViewAttributes, perViewAttributesPositionXOnly );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MultiviewPerViewAttributesInfoNVX const & ) const = default;
#else
    bool operator==( MultiviewPerViewAttributesInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( perViewAttributes == rhs.perViewAttributes ) && ( perViewAttributesPositionXOnly == rhs.perViewAttributesPositionXOnly );
#endif
    }

    bool operator!=( MultiviewPerViewAttributesInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultiviewPerViewAttributesInfoNVX;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes = {};
    VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX ) == sizeof( VkMultiviewPerViewAttributesInfoNVX ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX>::value, "MultiviewPerViewAttributesInfoNVX is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eMultiviewPerViewAttributesInfoNVX>
  {
    using Type = MultiviewPerViewAttributesInfoNVX;
  };
  struct MutableDescriptorTypeListVALVE
  {
    using NativeType = VkMutableDescriptorTypeListVALVE;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListVALVE(uint32_t descriptorTypeCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes_ = {}) VULKAN_HPP_NOEXCEPT
      : descriptorTypeCount( descriptorTypeCount_ ), pDescriptorTypes( pDescriptorTypes_ )
    {}

    VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListVALVE( MutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MutableDescriptorTypeListVALVE( VkMutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
      : MutableDescriptorTypeListVALVE( *reinterpret_cast<MutableDescriptorTypeListVALVE const *>( &rhs ) )
    {}

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    MutableDescriptorTypeListVALVE( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorType> const & descriptorTypes_ )
      : descriptorTypeCount( static_cast<uint32_t>( descriptorTypes_.size() ) ), pDescriptorTypes( descriptorTypes_.data() )
    {}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MutableDescriptorTypeListVALVE & operator=( MutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MutableDescriptorTypeListVALVE & operator=( VkMutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListVALVE & setDescriptorTypeCount( uint32_t descriptorTypeCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorTypeCount = descriptorTypeCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListVALVE & setPDescriptorTypes( const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      pDescriptorTypes = pDescriptorTypes_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    MutableDescriptorTypeListVALVE & setDescriptorTypes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorType> const & descriptorTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorTypeCount = static_cast<uint32_t>( descriptorTypes_.size() );
      pDescriptorTypes = descriptorTypes_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkMutableDescriptorTypeListVALVE const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMutableDescriptorTypeListVALVE*>( this );
    }

    explicit operator VkMutableDescriptorTypeListVALVE &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMutableDescriptorTypeListVALVE*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorType * const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( descriptorTypeCount, pDescriptorTypes );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( MutableDescriptorTypeListVALVE const & ) const = default;
#else
    bool operator==( MutableDescriptorTypeListVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( descriptorTypeCount == rhs.descriptorTypeCount )
          && ( pDescriptorTypes == rhs.pDescriptorTypes );
#endif
    }

    bool operator!=( MutableDescriptorTypeListVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t descriptorTypeCount = {};
    const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE ) == sizeof( VkMutableDescriptorTypeListVALVE ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE>::value, "MutableDescriptorTypeListVALVE is not nothrow_move_constructible!" );
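  // Usage sketch (illustrative comment, not generated code; assumes the default
  // VULKAN_HPP_NAMESPACE of `vk`): the enhanced-mode constructor above fills
  // descriptorTypeCount and pDescriptorTypes from a caller-owned container in one
  // step. The variable names are hypothetical.
  //
  //   std::array<vk::DescriptorType, 2> types = { vk::DescriptorType::eSampledImage,
  //                                               vk::DescriptorType::eStorageBuffer };
  //   vk::MutableDescriptorTypeListVALVE list( types );  // count == 2, pointer == types.data()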
|
|
|
|
struct MutableDescriptorTypeCreateInfoVALVE
|
|
{
|
|
using NativeType = VkMutableDescriptorTypeCreateInfoVALVE;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMutableDescriptorTypeCreateInfoVALVE;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoVALVE(uint32_t mutableDescriptorTypeListCount_ = {}, const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE * pMutableDescriptorTypeLists_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: mutableDescriptorTypeListCount( mutableDescriptorTypeListCount_ ), pMutableDescriptorTypeLists( pMutableDescriptorTypeLists_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoVALVE( MutableDescriptorTypeCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
MutableDescriptorTypeCreateInfoVALVE( VkMutableDescriptorTypeCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: MutableDescriptorTypeCreateInfoVALVE( *reinterpret_cast<MutableDescriptorTypeCreateInfoVALVE const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
MutableDescriptorTypeCreateInfoVALVE( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE> const & mutableDescriptorTypeLists_ )
|
|
: mutableDescriptorTypeListCount( static_cast<uint32_t>( mutableDescriptorTypeLists_.size() ) ), pMutableDescriptorTypeLists( mutableDescriptorTypeLists_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
MutableDescriptorTypeCreateInfoVALVE & operator=( MutableDescriptorTypeCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
MutableDescriptorTypeCreateInfoVALVE & operator=( VkMutableDescriptorTypeCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoVALVE const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoVALVE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoVALVE & setMutableDescriptorTypeListCount( uint32_t mutableDescriptorTypeListCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
mutableDescriptorTypeListCount = mutableDescriptorTypeListCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoVALVE & setPMutableDescriptorTypeLists( const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE * pMutableDescriptorTypeLists_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pMutableDescriptorTypeLists = pMutableDescriptorTypeLists_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
MutableDescriptorTypeCreateInfoVALVE & setMutableDescriptorTypeLists( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE> const & mutableDescriptorTypeLists_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
mutableDescriptorTypeListCount = static_cast<uint32_t>( mutableDescriptorTypeLists_.size() );
|
|
pMutableDescriptorTypeLists = mutableDescriptorTypeLists_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkMutableDescriptorTypeCreateInfoVALVE const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkMutableDescriptorTypeCreateInfoVALVE*>( this );
|
|
}
|
|
|
|
explicit operator VkMutableDescriptorTypeCreateInfoVALVE &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkMutableDescriptorTypeCreateInfoVALVE*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, mutableDescriptorTypeListCount, pMutableDescriptorTypeLists );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( MutableDescriptorTypeCreateInfoVALVE const & ) const = default;
|
|
#else
|
|
bool operator==( MutableDescriptorTypeCreateInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( mutableDescriptorTypeListCount == rhs.mutableDescriptorTypeListCount )
|
|
&& ( pMutableDescriptorTypeLists == rhs.pMutableDescriptorTypeLists );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( MutableDescriptorTypeCreateInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMutableDescriptorTypeCreateInfoVALVE;
|
|
const void * pNext = {};
|
|
uint32_t mutableDescriptorTypeListCount = {};
|
|
const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE * pMutableDescriptorTypeLists = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoVALVE ) == sizeof( VkMutableDescriptorTypeCreateInfoVALVE ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoVALVE>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoVALVE>::value, "MutableDescriptorTypeCreateInfoVALVE is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eMutableDescriptorTypeCreateInfoVALVE>
|
|
{
|
|
using Type = MutableDescriptorTypeCreateInfoVALVE;
|
|
};
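  // Usage sketch (illustrative comment, not generated code; assumes the default `vk`
  // namespace and the VK_VALVE_mutable_descriptor_type extension): the create info is
  // typically chained into a descriptor set layout via pNext, with one type list per
  // binding. `lists` and `bindings` are hypothetical.
  //
  //   vk::MutableDescriptorTypeCreateInfoVALVE mutableInfo( lists );
  //   vk::DescriptorSetLayoutCreateInfo layoutInfo( {}, bindings );
  //   layoutInfo.setPNext( &mutableInfo );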
  struct PastPresentationTimingGOOGLE
  {
    using NativeType = VkPastPresentationTimingGOOGLE;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE(uint32_t presentID_ = {}, uint64_t desiredPresentTime_ = {}, uint64_t actualPresentTime_ = {}, uint64_t earliestPresentTime_ = {}, uint64_t presentMargin_ = {}) VULKAN_HPP_NOEXCEPT
      : presentID( presentID_ ), desiredPresentTime( desiredPresentTime_ ), actualPresentTime( actualPresentTime_ ), earliestPresentTime( earliestPresentTime_ ), presentMargin( presentMargin_ )
    {}

    VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PastPresentationTimingGOOGLE( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
      : PastPresentationTimingGOOGLE( *reinterpret_cast<PastPresentationTimingGOOGLE const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PastPresentationTimingGOOGLE & operator=( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PastPresentationTimingGOOGLE & operator=( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE const *>( &rhs );
      return *this;
    }

    explicit operator VkPastPresentationTimingGOOGLE const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPastPresentationTimingGOOGLE*>( this );
    }

    explicit operator VkPastPresentationTimingGOOGLE &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPastPresentationTimingGOOGLE*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<uint32_t const &, uint64_t const &, uint64_t const &, uint64_t const &, uint64_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( presentID, desiredPresentTime, actualPresentTime, earliestPresentTime, presentMargin );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PastPresentationTimingGOOGLE const & ) const = default;
#else
    bool operator==( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( presentID == rhs.presentID )
          && ( desiredPresentTime == rhs.desiredPresentTime )
          && ( actualPresentTime == rhs.actualPresentTime )
          && ( earliestPresentTime == rhs.earliestPresentTime )
          && ( presentMargin == rhs.presentMargin );
#endif
    }

    bool operator!=( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t presentID = {};
    uint64_t desiredPresentTime = {};
    uint64_t actualPresentTime = {};
    uint64_t earliestPresentTime = {};
    uint64_t presentMargin = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE ) == sizeof( VkPastPresentationTimingGOOGLE ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>::value, "PastPresentationTimingGOOGLE is not nothrow_move_constructible!" );
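  // Usage sketch (illustrative comment, not generated code): these timings are
  // reported by the VK_GOOGLE_display_timing extension, and a frame's lateness can be
  // derived from the timestamps. The helper below is hypothetical.
  //
  //   uint64_t lateness( vk::PastPresentationTimingGOOGLE const & t )
  //   {
  //     return ( t.actualPresentTime > t.desiredPresentTime ) ? t.actualPresentTime - t.desiredPresentTime : 0;
  //   }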
|
|
|
|
struct PerformanceConfigurationAcquireInfoINTEL
|
|
{
|
|
using NativeType = VkPerformanceConfigurationAcquireInfoINTEL;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL(VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated) VULKAN_HPP_NOEXCEPT
|
|
: type( type_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PerformanceConfigurationAcquireInfoINTEL( *reinterpret_cast<PerformanceConfigurationAcquireInfoINTEL const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PerformanceConfigurationAcquireInfoINTEL & operator=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PerformanceConfigurationAcquireInfoINTEL & operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
type = type_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPerformanceConfigurationAcquireInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL*>( this );
|
|
}
|
|
|
|
explicit operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPerformanceConfigurationAcquireInfoINTEL*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, type );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PerformanceConfigurationAcquireInfoINTEL const & ) const = default;
|
|
#else
|
|
bool operator==( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( type == rhs.type );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL ) == sizeof( VkPerformanceConfigurationAcquireInfoINTEL ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL>::value, "PerformanceConfigurationAcquireInfoINTEL is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePerformanceConfigurationAcquireInfoINTEL>
|
|
{
|
|
using Type = PerformanceConfigurationAcquireInfoINTEL;
|
|
};
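  // Usage sketch (illustrative comment, not generated code; assumes the
  // VK_INTEL_performance_query extension and the default `vk` namespace): the acquire
  // info only carries the configuration type. `device` is hypothetical.
  //
  //   vk::PerformanceConfigurationAcquireInfoINTEL acquireInfo(
  //     vk::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated );
  //   auto configuration = device.acquirePerformanceConfigurationINTEL( acquireInfo );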
|
|
|
|
struct PerformanceCounterDescriptionKHR
|
|
{
|
|
using NativeType = VkPerformanceCounterDescriptionKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterDescriptionKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR(VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & name_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & category_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & description_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), name( name_ ), category( category_ ), description( description_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PerformanceCounterDescriptionKHR( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PerformanceCounterDescriptionKHR( *reinterpret_cast<PerformanceCounterDescriptionKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PerformanceCounterDescriptionKHR & operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PerformanceCounterDescriptionKHR & operator=( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPerformanceCounterDescriptionKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPerformanceCounterDescriptionKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPerformanceCounterDescriptionKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, name, category, description );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PerformanceCounterDescriptionKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( name == rhs.name )
|
|
&& ( category == rhs.category )
|
|
&& ( description == rhs.description );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterDescriptionKHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> category = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR ) == sizeof( VkPerformanceCounterDescriptionKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>::value, "PerformanceCounterDescriptionKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePerformanceCounterDescriptionKHR>
|
|
{
|
|
using Type = PerformanceCounterDescriptionKHR;
|
|
};
|
|
|
|
struct PerformanceCounterKHR
|
|
{
|
|
using NativeType = VkPerformanceCounterKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR(VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit_ = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric, VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope_ = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer, VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage_ = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32, std::array<uint8_t,VK_UUID_SIZE> const & uuid_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: unit( unit_ ), scope( scope_ ), storage( storage_ ), uuid( uuid_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PerformanceCounterKHR( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PerformanceCounterKHR( *reinterpret_cast<PerformanceCounterKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PerformanceCounterKHR & operator=( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PerformanceCounterKHR & operator=( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPerformanceCounterKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPerformanceCounterKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPerformanceCounterKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR const &, VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR const &, VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, unit, scope, storage, uuid );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PerformanceCounterKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( unit == rhs.unit )
|
|
&& ( scope == rhs.scope )
|
|
&& ( storage == rhs.storage )
|
|
&& ( uuid == rhs.uuid );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterKHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric;
|
|
VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer;
|
|
VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32;
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> uuid = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterKHR ) == sizeof( VkPerformanceCounterKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>::value, "PerformanceCounterKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePerformanceCounterKHR>
|
|
{
|
|
using Type = PerformanceCounterKHR;
|
|
};
  union PerformanceCounterResultKHR
  {
    using NativeType = VkPerformanceCounterResultKHR;

#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( int32_t int32_ = {} )
      : int32( int32_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( int64_t int64_ )
      : int64( int64_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( uint32_t uint32_ )
      : uint32( uint32_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( uint64_t uint64_ )
      : uint64( uint64_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( float float32_ )
      : float32( float32_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( double float64_ )
      : float64( float64_ )
    {}
#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/

#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setInt32( int32_t int32_ ) VULKAN_HPP_NOEXCEPT
    {
      int32 = int32_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setInt64( int64_t int64_ ) VULKAN_HPP_NOEXCEPT
    {
      int64 = int64_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setUint32( uint32_t uint32_ ) VULKAN_HPP_NOEXCEPT
    {
      uint32 = uint32_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setUint64( uint64_t uint64_ ) VULKAN_HPP_NOEXCEPT
    {
      uint64 = uint64_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setFloat32( float float32_ ) VULKAN_HPP_NOEXCEPT
    {
      float32 = float32_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setFloat64( double float64_ ) VULKAN_HPP_NOEXCEPT
    {
      float64 = float64_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_UNION_SETTERS*/

    operator VkPerformanceCounterResultKHR const &() const
    {
      return *reinterpret_cast<const VkPerformanceCounterResultKHR*>( this );
    }

    operator VkPerformanceCounterResultKHR &()
    {
      return *reinterpret_cast<VkPerformanceCounterResultKHR*>( this );
    }

    int32_t int32;
    int64_t int64;
    uint32_t uint32;
    uint64_t uint64;
    float float32;
    double float64;
  };
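  // Usage sketch (illustrative comment, not generated code): which member of this
  // union is valid is indicated by the matching PerformanceCounterKHR::storage value,
  // so results are usually read with a switch. `counter`, `result` and `use` are
  // hypothetical.
  //
  //   switch ( counter.storage )
  //   {
  //     case vk::PerformanceCounterStorageKHR::eInt32:   use( result.int32 );   break;
  //     case vk::PerformanceCounterStorageKHR::eFloat64: use( result.float64 ); break;
  //     default: break;  // remaining storage kinds are handled the same way
  //   }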
|
|
|
|
struct PerformanceMarkerInfoINTEL
|
|
{
|
|
using NativeType = VkPerformanceMarkerInfoINTEL;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceMarkerInfoINTEL;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL(uint64_t marker_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: marker( marker_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PerformanceMarkerInfoINTEL( *reinterpret_cast<PerformanceMarkerInfoINTEL const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PerformanceMarkerInfoINTEL & operator=( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PerformanceMarkerInfoINTEL & operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL & setMarker( uint64_t marker_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
marker = marker_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPerformanceMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPerformanceMarkerInfoINTEL*>( this );
|
|
}
|
|
|
|
explicit operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPerformanceMarkerInfoINTEL*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, marker );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PerformanceMarkerInfoINTEL const & ) const = default;
|
|
#else
|
|
bool operator==( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( marker == rhs.marker );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL;
|
|
const void * pNext = {};
|
|
uint64_t marker = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL>::value, "PerformanceMarkerInfoINTEL is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePerformanceMarkerInfoINTEL>
|
|
{
|
|
using Type = PerformanceMarkerInfoINTEL;
|
|
};
|
|
|
|
struct PerformanceOverrideInfoINTEL
|
|
{
|
|
using NativeType = VkPerformanceOverrideInfoINTEL;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceOverrideInfoINTEL;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL(VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware, VULKAN_HPP_NAMESPACE::Bool32 enable_ = {}, uint64_t parameter_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: type( type_ ), enable( enable_ ), parameter( parameter_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PerformanceOverrideInfoINTEL( *reinterpret_cast<PerformanceOverrideInfoINTEL const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PerformanceOverrideInfoINTEL & operator=( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PerformanceOverrideInfoINTEL & operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
type = type_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setEnable( VULKAN_HPP_NAMESPACE::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
enable = enable_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setParameter( uint64_t parameter_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
parameter = parameter_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPerformanceOverrideInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPerformanceOverrideInfoINTEL*>( this );
|
|
}
|
|
|
|
explicit operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPerformanceOverrideInfoINTEL*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint64_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, type, enable, parameter );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PerformanceOverrideInfoINTEL const & ) const = default;
|
|
#else
|
|
bool operator==( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( type == rhs.type )
|
|
&& ( enable == rhs.enable )
|
|
&& ( parameter == rhs.parameter );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware;
|
|
VULKAN_HPP_NAMESPACE::Bool32 enable = {};
|
|
uint64_t parameter = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL>::value, "PerformanceOverrideInfoINTEL is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePerformanceOverrideInfoINTEL>
|
|
{
|
|
using Type = PerformanceOverrideInfoINTEL;
|
|
};
|
|
|
|
struct PerformanceQuerySubmitInfoKHR
|
|
{
|
|
using NativeType = VkPerformanceQuerySubmitInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceQuerySubmitInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR(uint32_t counterPassIndex_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: counterPassIndex( counterPassIndex_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PerformanceQuerySubmitInfoKHR( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PerformanceQuerySubmitInfoKHR( *reinterpret_cast<PerformanceQuerySubmitInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PerformanceQuerySubmitInfoKHR & operator=( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PerformanceQuerySubmitInfoKHR & operator=( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR & setCounterPassIndex( uint32_t counterPassIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
counterPassIndex = counterPassIndex_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPerformanceQuerySubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPerformanceQuerySubmitInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkPerformanceQuerySubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPerformanceQuerySubmitInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, counterPassIndex );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PerformanceQuerySubmitInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( counterPassIndex == rhs.counterPassIndex );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceQuerySubmitInfoKHR;
|
|
const void * pNext = {};
|
|
uint32_t counterPassIndex = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR ) == sizeof( VkPerformanceQuerySubmitInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR>::value, "PerformanceQuerySubmitInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePerformanceQuerySubmitInfoKHR>
|
|
{
|
|
using Type = PerformanceQuerySubmitInfoKHR;
|
|
};
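  // Usage sketch (illustrative comment, not generated code; assumes the
  // VK_KHR_performance_query extension): when a counter set needs several passes, the
  // same work is submitted once per pass with this struct chained into the SubmitInfo.
  // `cmd`, `queue` and `passes` are hypothetical.
  //
  //   for ( uint32_t pass = 0; pass < passes; ++pass )
  //   {
  //     vk::PerformanceQuerySubmitInfoKHR passInfo( pass );
  //     vk::SubmitInfo submitInfo( {}, {}, cmd );
  //     submitInfo.setPNext( &passInfo );
  //     queue.submit( submitInfo );
  //   }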
|
|
|
|
struct PerformanceStreamMarkerInfoINTEL
|
|
{
|
|
using NativeType = VkPerformanceStreamMarkerInfoINTEL;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceStreamMarkerInfoINTEL;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL(uint32_t marker_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: marker( marker_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PerformanceStreamMarkerInfoINTEL( *reinterpret_cast<PerformanceStreamMarkerInfoINTEL const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PerformanceStreamMarkerInfoINTEL & operator=( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PerformanceStreamMarkerInfoINTEL & operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL & setMarker( uint32_t marker_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
marker = marker_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPerformanceStreamMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL*>( this );
|
|
}
|
|
|
|
explicit operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPerformanceStreamMarkerInfoINTEL*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, marker );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PerformanceStreamMarkerInfoINTEL const & ) const = default;
|
|
#else
|
|
bool operator==( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( marker == rhs.marker );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL;
|
|
const void * pNext = {};
|
|
uint32_t marker = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL>::value, "PerformanceStreamMarkerInfoINTEL is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePerformanceStreamMarkerInfoINTEL>
|
|
{
|
|
using Type = PerformanceStreamMarkerInfoINTEL;
|
|
};
  union PerformanceValueDataINTEL
  {
    using NativeType = VkPerformanceValueDataINTEL;

#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( uint32_t value32_ = {} )
      : value32( value32_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( uint64_t value64_ )
      : value64( value64_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( float valueFloat_ )
      : valueFloat( valueFloat_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( const char * valueString_ )
      : valueString( valueString_ )
    {}
#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/

#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValue32( uint32_t value32_ ) VULKAN_HPP_NOEXCEPT
    {
      value32 = value32_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValue64( uint64_t value64_ ) VULKAN_HPP_NOEXCEPT
    {
      value64 = value64_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueFloat( float valueFloat_ ) VULKAN_HPP_NOEXCEPT
    {
      valueFloat = valueFloat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueBool( VULKAN_HPP_NAMESPACE::Bool32 valueBool_ ) VULKAN_HPP_NOEXCEPT
    {
      valueBool = valueBool_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueString( const char * valueString_ ) VULKAN_HPP_NOEXCEPT
    {
      valueString = valueString_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_UNION_SETTERS*/

    operator VkPerformanceValueDataINTEL const &() const
    {
      return *reinterpret_cast<const VkPerformanceValueDataINTEL*>( this );
    }

    operator VkPerformanceValueDataINTEL &()
    {
      return *reinterpret_cast<VkPerformanceValueDataINTEL*>( this );
    }

#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
    uint32_t value32;
    uint64_t value64;
    float valueFloat;
    VULKAN_HPP_NAMESPACE::Bool32 valueBool;
    const char * valueString;
#else
    uint32_t value32;
    uint64_t value64;
    float valueFloat;
    VkBool32 valueBool;
    const char * valueString;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  };

  struct PerformanceValueINTEL
  {
    using NativeType = VkPerformanceValueINTEL;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL(VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32, VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ = {}) VULKAN_HPP_NOEXCEPT
      : type( type_ ), data( data_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PerformanceValueINTEL( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
      : PerformanceValueINTEL( *reinterpret_cast<PerformanceValueINTEL const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PerformanceValueINTEL & operator=( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PerformanceValueINTEL & operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL & setData( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & data_ ) VULKAN_HPP_NOEXCEPT
    {
      data = data_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPerformanceValueINTEL const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPerformanceValueINTEL*>( this );
    }

    explicit operator VkPerformanceValueINTEL &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPerformanceValueINTEL*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL const &, VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( type, data );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32;
    VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueINTEL ) == sizeof( VkPerformanceValueINTEL ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::value, "PerformanceValueINTEL is not nothrow_move_constructible!" );
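  // Usage sketch (illustrative comment, not generated code): PerformanceValueINTEL
  // pairs a type tag with the PerformanceValueDataINTEL union above, so the tag
  // selects the active member. `value` and `process` are hypothetical.
  //
  //   if ( value.type == vk::PerformanceValueTypeINTEL::eUint64 )
  //     process( value.data.value64 );
  //   else if ( value.type == vk::PerformanceValueTypeINTEL::eString )
  //     process( value.data.valueString );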
|
|
|
|
struct PhysicalDevice16BitStorageFeatures
|
|
{
|
|
using NativeType = VkPhysicalDevice16BitStorageFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice16BitStorageFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures(VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: storageBuffer16BitAccess( storageBuffer16BitAccess_ ), uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ), storagePushConstant16( storagePushConstant16_ ), storageInputOutput16( storageInputOutput16_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDevice16BitStorageFeatures( *reinterpret_cast<PhysicalDevice16BitStorageFeatures const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDevice16BitStorageFeatures & operator=( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevice16BitStorageFeatures & operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
storageBuffer16BitAccess = storageBuffer16BitAccess_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
storagePushConstant16 = storagePushConstant16_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
storageInputOutput16 = storageInputOutput16_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDevice16BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDevice16BitStorageFeatures*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDevice16BitStorageFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDevice16BitStorageFeatures*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, storageBuffer16BitAccess, uniformAndStorageBuffer16BitAccess, storagePushConstant16, storageInputOutput16 );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDevice16BitStorageFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess )
|
|
&& ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess )
|
|
&& ( storagePushConstant16 == rhs.storagePushConstant16 )
|
|
&& ( storageInputOutput16 == rhs.storageInputOutput16 );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures ) == sizeof( VkPhysicalDevice16BitStorageFeatures ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures>::value, "PhysicalDevice16BitStorageFeatures is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDevice16BitStorageFeatures>
|
|
{
|
|
using Type = PhysicalDevice16BitStorageFeatures;
|
|
};
|
|
using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures;
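  // Usage sketch (illustrative comment, not generated code; assumes Vulkan 1.1 or
  // VK_KHR_get_physical_device_properties2 and the default `vk` namespace): feature
  // structs like this one are usually queried through a structure chain.
  // `physicalDevice` is hypothetical.
  //
  //   auto chain    = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                               vk::PhysicalDevice16BitStorageFeatures>();
  //   auto features = chain.get<vk::PhysicalDevice16BitStorageFeatures>();
  //   bool ok       = ( features.storageBuffer16BitAccess == VK_TRUE );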

  struct PhysicalDevice4444FormatsFeaturesEXT
  {
    using NativeType = VkPhysicalDevice4444FormatsFeaturesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ = {}, VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ = {} ) VULKAN_HPP_NOEXCEPT
      : formatA4R4G4B4( formatA4R4G4B4_ ), formatA4B4G4R4( formatA4B4G4R4_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevice4444FormatsFeaturesEXT( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevice4444FormatsFeaturesEXT( *reinterpret_cast<PhysicalDevice4444FormatsFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDevice4444FormatsFeaturesEXT & operator=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevice4444FormatsFeaturesEXT & operator=( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setFormatA4R4G4B4( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ ) VULKAN_HPP_NOEXCEPT
    {
      formatA4R4G4B4 = formatA4R4G4B4_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setFormatA4B4G4R4( VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ ) VULKAN_HPP_NOEXCEPT
    {
      formatA4B4G4R4 = formatA4B4G4R4_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDevice4444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevice4444FormatsFeaturesEXT*>( this );
    }

    explicit operator VkPhysicalDevice4444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevice4444FormatsFeaturesEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, formatA4R4G4B4, formatA4B4G4R4 );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDevice4444FormatsFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( formatA4R4G4B4 == rhs.formatA4R4G4B4 )
          && ( formatA4B4G4R4 == rhs.formatA4B4G4R4 );
#endif
    }

    bool operator!=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4 = {};
    VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4 = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT ) == sizeof( VkPhysicalDevice4444FormatsFeaturesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT>::value, "PhysicalDevice4444FormatsFeaturesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevice4444FormatsFeaturesEXT>
  {
    using Type = PhysicalDevice4444FormatsFeaturesEXT;
  };

  struct PhysicalDevice8BitStorageFeatures
  {
    using NativeType = VkPhysicalDevice8BitStorageFeatures;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice8BitStorageFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {} ) VULKAN_HPP_NOEXCEPT
      : storageBuffer8BitAccess( storageBuffer8BitAccess_ ), uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ ), storagePushConstant8( storagePushConstant8_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevice8BitStorageFeatures( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevice8BitStorageFeatures( *reinterpret_cast<PhysicalDevice8BitStorageFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDevice8BitStorageFeatures & operator=( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevice8BitStorageFeatures & operator=( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      storageBuffer8BitAccess = storageBuffer8BitAccess_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
    {
      storagePushConstant8 = storagePushConstant8_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDevice8BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevice8BitStorageFeatures*>( this );
    }

    explicit operator VkPhysicalDevice8BitStorageFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevice8BitStorageFeatures*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, storageBuffer8BitAccess, uniformAndStorageBuffer8BitAccess, storagePushConstant8 );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDevice8BitStorageFeatures const & ) const = default;
#else
    bool operator==( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess )
          && ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess )
          && ( storagePushConstant8 == rhs.storagePushConstant8 );
#endif
    }

    bool operator!=( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice8BitStorageFeatures;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {};
    VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {};
    VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures ) == sizeof( VkPhysicalDevice8BitStorageFeatures ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures>::value, "PhysicalDevice8BitStorageFeatures is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevice8BitStorageFeatures>
  {
    using Type = PhysicalDevice8BitStorageFeatures;
  };
  using PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures;
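
  // Editorial usage sketch (illustrative only): because every setter returns a reference to the struct,
  // features can be configured fluently and hooked into a pNext chain. 'deviceCreateInfo' below is a
  // placeholder name, not part of this header:
  //
  //   auto storage8 = vk::PhysicalDevice8BitStorageFeatures{}
  //                     .setStorageBuffer8BitAccess( VK_TRUE )
  //                     .setUniformAndStorageBuffer8BitAccess( VK_TRUE );
  //   vk::DeviceCreateInfo deviceCreateInfo{};
  //   deviceCreateInfo.setPNext( &storage8 );      // requested features ride along in the pNext chain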

  struct PhysicalDeviceASTCDecodeFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceASTCDecodeFeaturesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {} ) VULKAN_HPP_NOEXCEPT
      : decodeModeSharedExponent( decodeModeSharedExponent_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceASTCDecodeFeaturesEXT( *reinterpret_cast<PhysicalDeviceASTCDecodeFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceASTCDecodeFeaturesEXT & operator=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceASTCDecodeFeaturesEXT & operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT & setDecodeModeSharedExponent( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ ) VULKAN_HPP_NOEXCEPT
    {
      decodeModeSharedExponent = decodeModeSharedExponent_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceASTCDecodeFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceASTCDecodeFeaturesEXT*>( this );
    }

    explicit operator VkPhysicalDeviceASTCDecodeFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceASTCDecodeFeaturesEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, decodeModeSharedExponent );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceASTCDecodeFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( decodeModeSharedExponent == rhs.decodeModeSharedExponent );
#endif
    }

    bool operator!=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT ) == sizeof( VkPhysicalDeviceASTCDecodeFeaturesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT>::value, "PhysicalDeviceASTCDecodeFeaturesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT>
  {
    using Type = PhysicalDeviceASTCDecodeFeaturesEXT;
  };

  struct PhysicalDeviceAccelerationStructureFeaturesKHR
  {
    using NativeType = VkPhysicalDeviceAccelerationStructureFeaturesKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructureFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ = {}, VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ = {}, VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ = {} ) VULKAN_HPP_NOEXCEPT
      : accelerationStructure( accelerationStructure_ ), accelerationStructureCaptureReplay( accelerationStructureCaptureReplay_ ), accelerationStructureIndirectBuild( accelerationStructureIndirectBuild_ ), accelerationStructureHostCommands( accelerationStructureHostCommands_ ), descriptorBindingAccelerationStructureUpdateAfterBind( descriptorBindingAccelerationStructureUpdateAfterBind_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructureFeaturesKHR( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceAccelerationStructureFeaturesKHR( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceAccelerationStructureFeaturesKHR( *reinterpret_cast<PhysicalDeviceAccelerationStructureFeaturesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceAccelerationStructureFeaturesKHR & operator=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceAccelerationStructureFeaturesKHR & operator=( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructure( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructure = accelerationStructure_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructureCaptureReplay = accelerationStructureCaptureReplay_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureIndirectBuild( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructureIndirectBuild = accelerationStructureIndirectBuild_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureHostCommands( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructureHostCommands = accelerationStructureHostCommands_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setDescriptorBindingAccelerationStructureUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingAccelerationStructureUpdateAfterBind = descriptorBindingAccelerationStructureUpdateAfterBind_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceAccelerationStructureFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceAccelerationStructureFeaturesKHR*>( this );
    }

    explicit operator VkPhysicalDeviceAccelerationStructureFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceAccelerationStructureFeaturesKHR*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, accelerationStructure, accelerationStructureCaptureReplay, accelerationStructureIndirectBuild, accelerationStructureHostCommands, descriptorBindingAccelerationStructureUpdateAfterBind );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceAccelerationStructureFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( accelerationStructure == rhs.accelerationStructure )
          && ( accelerationStructureCaptureReplay == rhs.accelerationStructureCaptureReplay )
          && ( accelerationStructureIndirectBuild == rhs.accelerationStructureIndirectBuild )
          && ( accelerationStructureHostCommands == rhs.accelerationStructureHostCommands )
          && ( descriptorBindingAccelerationStructureUpdateAfterBind == rhs.descriptorBindingAccelerationStructureUpdateAfterBind );
#endif
    }

    bool operator!=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure = {};
    VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay = {};
    VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild = {};
    VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR ) == sizeof( VkPhysicalDeviceAccelerationStructureFeaturesKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR>::value, "PhysicalDeviceAccelerationStructureFeaturesKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR>
  {
    using Type = PhysicalDeviceAccelerationStructureFeaturesKHR;
  };
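
  // Editorial usage sketch (illustrative only): PhysicalDeviceAccelerationStructureFeaturesKHR serves both
  // as a query output (chained into PhysicalDeviceFeatures2) and as a device-creation input. Assuming
  // 'physicalDevice' is a valid vk::PhysicalDevice exposing VK_KHR_acceleration_structure:
  //
  //   vk::PhysicalDeviceAccelerationStructureFeaturesKHR asFeatures;
  //   vk::PhysicalDeviceFeatures2                         features2;
  //   features2.pNext = &asFeatures;
  //   physicalDevice.getFeatures2( &features2 );
  //   if ( asFeatures.accelerationStructure )
  //   {
  //     // chain the (possibly trimmed) struct into DeviceCreateInfo::pNext to enable the feature
  //   }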

  struct PhysicalDeviceAccelerationStructurePropertiesKHR
  {
    using NativeType = VkPhysicalDeviceAccelerationStructurePropertiesKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR( uint64_t maxGeometryCount_ = {}, uint64_t maxInstanceCount_ = {}, uint64_t maxPrimitiveCount_ = {}, uint32_t maxPerStageDescriptorAccelerationStructures_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ = {}, uint32_t maxDescriptorSetAccelerationStructures_ = {}, uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures_ = {}, uint32_t minAccelerationStructureScratchOffsetAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
      : maxGeometryCount( maxGeometryCount_ ), maxInstanceCount( maxInstanceCount_ ), maxPrimitiveCount( maxPrimitiveCount_ ), maxPerStageDescriptorAccelerationStructures( maxPerStageDescriptorAccelerationStructures_ ), maxPerStageDescriptorUpdateAfterBindAccelerationStructures( maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ ), maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ ), maxDescriptorSetUpdateAfterBindAccelerationStructures( maxDescriptorSetUpdateAfterBindAccelerationStructures_ ), minAccelerationStructureScratchOffsetAlignment( minAccelerationStructureScratchOffsetAlignment_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceAccelerationStructurePropertiesKHR( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceAccelerationStructurePropertiesKHR( *reinterpret_cast<PhysicalDeviceAccelerationStructurePropertiesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceAccelerationStructurePropertiesKHR & operator=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceAccelerationStructurePropertiesKHR & operator=( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR const *>( &rhs );
      return *this;
    }

    explicit operator VkPhysicalDeviceAccelerationStructurePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceAccelerationStructurePropertiesKHR*>( this );
    }

    explicit operator VkPhysicalDeviceAccelerationStructurePropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceAccelerationStructurePropertiesKHR*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint64_t const &, uint64_t const &, uint64_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxGeometryCount, maxInstanceCount, maxPrimitiveCount, maxPerStageDescriptorAccelerationStructures, maxPerStageDescriptorUpdateAfterBindAccelerationStructures, maxDescriptorSetAccelerationStructures, maxDescriptorSetUpdateAfterBindAccelerationStructures, minAccelerationStructureScratchOffsetAlignment );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceAccelerationStructurePropertiesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( maxGeometryCount == rhs.maxGeometryCount )
          && ( maxInstanceCount == rhs.maxInstanceCount )
          && ( maxPrimitiveCount == rhs.maxPrimitiveCount )
          && ( maxPerStageDescriptorAccelerationStructures == rhs.maxPerStageDescriptorAccelerationStructures )
          && ( maxPerStageDescriptorUpdateAfterBindAccelerationStructures == rhs.maxPerStageDescriptorUpdateAfterBindAccelerationStructures )
          && ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures )
          && ( maxDescriptorSetUpdateAfterBindAccelerationStructures == rhs.maxDescriptorSetUpdateAfterBindAccelerationStructures )
          && ( minAccelerationStructureScratchOffsetAlignment == rhs.minAccelerationStructureScratchOffsetAlignment );
#endif
    }

    bool operator!=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR;
    void * pNext = {};
    uint64_t maxGeometryCount = {};
    uint64_t maxInstanceCount = {};
    uint64_t maxPrimitiveCount = {};
    uint32_t maxPerStageDescriptorAccelerationStructures = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures = {};
    uint32_t maxDescriptorSetAccelerationStructures = {};
    uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures = {};
    uint32_t minAccelerationStructureScratchOffsetAlignment = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR ) == sizeof( VkPhysicalDeviceAccelerationStructurePropertiesKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR>::value, "PhysicalDeviceAccelerationStructurePropertiesKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR>
  {
    using Type = PhysicalDeviceAccelerationStructurePropertiesKHR;
  };
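
  // Editorial usage sketch (illustrative only): properties structs carry no setters because they are pure
  // query outputs. For example, minAccelerationStructureScratchOffsetAlignment constrains the scratch
  // buffer address handed to acceleration structure build commands. 'physicalDevice' is a placeholder:
  //
  //   vk::PhysicalDeviceAccelerationStructurePropertiesKHR asProps;
  //   vk::PhysicalDeviceProperties2                         props2;
  //   props2.pNext = &asProps;
  //   physicalDevice.getProperties2( &props2 );
  //   // round any scratch device address up to asProps.minAccelerationStructureScratchOffsetAlignment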

  struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {} ) VULKAN_HPP_NOEXCEPT
      : advancedBlendCoherentOperations( advancedBlendCoherentOperations_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceBlendOperationAdvancedFeaturesEXT( *reinterpret_cast<PhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setAdvancedBlendCoherentOperations( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ ) VULKAN_HPP_NOEXCEPT
    {
      advancedBlendCoherentOperations = advancedBlendCoherentOperations_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>( this );
    }

    explicit operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, advancedBlendCoherentOperations );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations );
#endif
    }

    bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT>::value, "PhysicalDeviceBlendOperationAdvancedFeaturesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT>
  {
    using Type = PhysicalDeviceBlendOperationAdvancedFeaturesEXT;
  };

  struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( uint32_t advancedBlendMaxColorAttachments_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = {} ) VULKAN_HPP_NOEXCEPT
      : advancedBlendMaxColorAttachments( advancedBlendMaxColorAttachments_ ), advancedBlendIndependentBlend( advancedBlendIndependentBlend_ ), advancedBlendNonPremultipliedSrcColor( advancedBlendNonPremultipliedSrcColor_ ), advancedBlendNonPremultipliedDstColor( advancedBlendNonPremultipliedDstColor_ ), advancedBlendCorrelatedOverlap( advancedBlendCorrelatedOverlap_ ), advancedBlendAllOperations( advancedBlendAllOperations_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceBlendOperationAdvancedPropertiesEXT( *reinterpret_cast<PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>( &rhs );
      return *this;
    }

    explicit operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>( this );
    }

    explicit operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, advancedBlendMaxColorAttachments, advancedBlendIndependentBlend, advancedBlendNonPremultipliedSrcColor, advancedBlendNonPremultipliedDstColor, advancedBlendCorrelatedOverlap, advancedBlendAllOperations );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments )
          && ( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend )
          && ( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor )
          && ( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor )
          && ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap )
          && ( advancedBlendAllOperations == rhs.advancedBlendAllOperations );
#endif
    }

    bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
    void * pNext = {};
    uint32_t advancedBlendMaxColorAttachments = {};
    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend = {};
    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor = {};
    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor = {};
    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap = {};
    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT>::value, "PhysicalDeviceBlendOperationAdvancedPropertiesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT>
  {
    using Type = PhysicalDeviceBlendOperationAdvancedPropertiesEXT;
  };

  struct PhysicalDeviceBorderColorSwizzleFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceBorderColorSwizzleFeaturesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceBorderColorSwizzleFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle_ = {}, VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage_ = {} ) VULKAN_HPP_NOEXCEPT
      : borderColorSwizzle( borderColorSwizzle_ ), borderColorSwizzleFromImage( borderColorSwizzleFromImage_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceBorderColorSwizzleFeaturesEXT( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceBorderColorSwizzleFeaturesEXT( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceBorderColorSwizzleFeaturesEXT( *reinterpret_cast<PhysicalDeviceBorderColorSwizzleFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceBorderColorSwizzleFeaturesEXT & operator=( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceBorderColorSwizzleFeaturesEXT & operator=( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & setBorderColorSwizzle( VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle_ ) VULKAN_HPP_NOEXCEPT
    {
      borderColorSwizzle = borderColorSwizzle_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & setBorderColorSwizzleFromImage( VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage_ ) VULKAN_HPP_NOEXCEPT
    {
      borderColorSwizzleFromImage = borderColorSwizzleFromImage_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceBorderColorSwizzleFeaturesEXT*>( this );
    }

    explicit operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceBorderColorSwizzleFeaturesEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, borderColorSwizzle, borderColorSwizzleFromImage );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( borderColorSwizzle == rhs.borderColorSwizzle )
          && ( borderColorSwizzleFromImage == rhs.borderColorSwizzleFromImage );
#endif
    }

    bool operator!=( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle = {};
    VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT ) == sizeof( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT>::value, "PhysicalDeviceBorderColorSwizzleFeaturesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT>
  {
    using Type = PhysicalDeviceBorderColorSwizzleFeaturesEXT;
  };

  struct PhysicalDeviceBufferDeviceAddressFeatures
  {
    using NativeType = VkPhysicalDeviceBufferDeviceAddressFeatures;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {} ) VULKAN_HPP_NOEXCEPT
      : bufferDeviceAddress( bufferDeviceAddress_ ), bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ), bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceBufferDeviceAddressFeatures( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceBufferDeviceAddressFeatures( *reinterpret_cast<PhysicalDeviceBufferDeviceAddressFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceBufferDeviceAddressFeatures & operator=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceBufferDeviceAddressFeatures & operator=( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferDeviceAddress = bufferDeviceAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceBufferDeviceAddressFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeatures*>( this );
    }

    explicit operator VkPhysicalDeviceBufferDeviceAddressFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeatures*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceBufferDeviceAddressFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( bufferDeviceAddress == rhs.bufferDeviceAddress )
          && ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay )
          && ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice );
#endif
    }

    bool operator!=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeatures ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures>::value, "PhysicalDeviceBufferDeviceAddressFeatures is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceBufferDeviceAddressFeatures>
  {
    using Type = PhysicalDeviceBufferDeviceAddressFeatures;
  };
  using PhysicalDeviceBufferDeviceAddressFeaturesKHR = PhysicalDeviceBufferDeviceAddressFeatures;
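
  // Editorial usage sketch (illustrative only): bufferDeviceAddress has to be enabled through this struct
  // (chained into DeviceCreateInfo::pNext) before buffer device addresses can be retrieved at runtime.
  // 'deviceCreateInfo' is a placeholder name:
  //
  //   vk::PhysicalDeviceBufferDeviceAddressFeatures bdaFeatures;
  //   bdaFeatures.setBufferDeviceAddress( VK_TRUE );
  //   vk::DeviceCreateInfo deviceCreateInfo{};
  //   deviceCreateInfo.setPNext( &bdaFeatures );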

  struct PhysicalDeviceBufferDeviceAddressFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceBufferDeviceAddressFeaturesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {} ) VULKAN_HPP_NOEXCEPT
      : bufferDeviceAddress( bufferDeviceAddress_ ), bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ), bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceBufferDeviceAddressFeaturesEXT( *reinterpret_cast<PhysicalDeviceBufferDeviceAddressFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferDeviceAddress = bufferDeviceAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeaturesEXT*>( this );
    }

    explicit operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( bufferDeviceAddress == rhs.bufferDeviceAddress )
          && ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay )
          && ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice );
#endif
    }

    bool operator!=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT>::value, "PhysicalDeviceBufferDeviceAddressFeaturesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT>
  {
    using Type = PhysicalDeviceBufferDeviceAddressFeaturesEXT;
  };
  using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT;

  struct PhysicalDeviceCoherentMemoryFeaturesAMD
  {
    using NativeType = VkPhysicalDeviceCoherentMemoryFeaturesAMD;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = {} ) VULKAN_HPP_NOEXCEPT
      : deviceCoherentMemory( deviceCoherentMemory_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCoherentMemoryFeaturesAMD( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCoherentMemoryFeaturesAMD( *reinterpret_cast<PhysicalDeviceCoherentMemoryFeaturesAMD const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD & setDeviceCoherentMemory( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceCoherentMemory = deviceCoherentMemory_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCoherentMemoryFeaturesAMD*>( this );
    }

    explicit operator VkPhysicalDeviceCoherentMemoryFeaturesAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCoherentMemoryFeaturesAMD*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, deviceCoherentMemory );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceCoherentMemoryFeaturesAMD const & ) const = default;
#else
    bool operator==( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( deviceCoherentMemory == rhs.deviceCoherentMemory );
#endif
    }

    bool operator!=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD ) == sizeof( VkPhysicalDeviceCoherentMemoryFeaturesAMD ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD>::value, "PhysicalDeviceCoherentMemoryFeaturesAMD is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD>
  {
    using Type = PhysicalDeviceCoherentMemoryFeaturesAMD;
  };
|
|
|
|
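  // Usage sketch (illustrative only, not part of the generated API): like every PhysicalDevice...Features struct,
  // this one is queried by chaining it into PhysicalDeviceFeatures2 through pNext before calling getFeatures2.
  // Assumes a VULKAN_HPP_NAMESPACE::PhysicalDevice named `physicalDevice`:
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD coherentMemoryFeatures;
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2                 features2;
  //   features2.pNext = &coherentMemoryFeatures;   // sType is already set by the wrapper's default member initializer
  //   physicalDevice.getFeatures2( &features2 );
  //   if ( coherentMemoryFeatures.deviceCoherentMemory ) { /* device-coherent memory is usable */ }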
  struct PhysicalDeviceColorWriteEnableFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceColorWriteEnableFeaturesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ = {}) VULKAN_HPP_NOEXCEPT
      : colorWriteEnable( colorWriteEnable_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceColorWriteEnableFeaturesEXT( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceColorWriteEnableFeaturesEXT( *reinterpret_cast<PhysicalDeviceColorWriteEnableFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceColorWriteEnableFeaturesEXT & operator=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceColorWriteEnableFeaturesEXT & operator=( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT & setColorWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      colorWriteEnable = colorWriteEnable_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceColorWriteEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceColorWriteEnableFeaturesEXT*>( this );
    }

    explicit operator VkPhysicalDeviceColorWriteEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceColorWriteEnableFeaturesEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, colorWriteEnable );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceColorWriteEnableFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( colorWriteEnable == rhs.colorWriteEnable );
#endif
    }

    bool operator!=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT ) == sizeof( VkPhysicalDeviceColorWriteEnableFeaturesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT>::value, "PhysicalDeviceColorWriteEnableFeaturesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT>
  {
    using Type = PhysicalDeviceColorWriteEnableFeaturesEXT;
  };
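  // Usage sketch (illustrative; queue-create-info and extension setup omitted and hypothetical): a ...Features
  // wrapper is also how a feature is enabled — fill it in with the chained setters, then hang it off
  // DeviceCreateInfo::pNext before device creation.
  //
  //   auto colorWriteEnableFeatures =
  //     VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT{}.setColorWriteEnable( VK_TRUE );
  //   VULKAN_HPP_NAMESPACE::DeviceCreateInfo deviceCreateInfo;
  //   deviceCreateInfo.setPNext( &colorWriteEnableFeatures );
  //   // ... add queue create infos and enabled extensions, then call physicalDevice.createDevice( deviceCreateInfo )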
  struct PhysicalDeviceComputeShaderDerivativesFeaturesNV
  {
    using NativeType = VkPhysicalDeviceComputeShaderDerivativesFeaturesNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = {}, VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {}) VULKAN_HPP_NOEXCEPT
      : computeDerivativeGroupQuads( computeDerivativeGroupQuads_ ), computeDerivativeGroupLinear( computeDerivativeGroupLinear_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceComputeShaderDerivativesFeaturesNV( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceComputeShaderDerivativesFeaturesNV( *reinterpret_cast<PhysicalDeviceComputeShaderDerivativesFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV & setComputeDerivativeGroupQuads( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ ) VULKAN_HPP_NOEXCEPT
    {
      computeDerivativeGroupQuads = computeDerivativeGroupQuads_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV & setComputeDerivativeGroupLinear( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ ) VULKAN_HPP_NOEXCEPT
    {
      computeDerivativeGroupLinear = computeDerivativeGroupLinear_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceComputeShaderDerivativesFeaturesNV*>( this );
    }

    explicit operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, computeDerivativeGroupQuads, computeDerivativeGroupLinear );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( computeDerivativeGroupQuads == rhs.computeDerivativeGroupQuads )
          && ( computeDerivativeGroupLinear == rhs.computeDerivativeGroupLinear );
#endif
    }

    bool operator!=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads = {};
    VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV ) == sizeof( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV>::value, "PhysicalDeviceComputeShaderDerivativesFeaturesNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV>
  {
    using Type = PhysicalDeviceComputeShaderDerivativesFeaturesNV;
  };
  struct PhysicalDeviceConditionalRenderingFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceConditionalRenderingFeaturesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ = {}, VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = {}) VULKAN_HPP_NOEXCEPT
      : conditionalRendering( conditionalRendering_ ), inheritedConditionalRendering( inheritedConditionalRendering_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceConditionalRenderingFeaturesEXT( *reinterpret_cast<PhysicalDeviceConditionalRenderingFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & setConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ ) VULKAN_HPP_NOEXCEPT
    {
      conditionalRendering = conditionalRendering_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & setInheritedConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ ) VULKAN_HPP_NOEXCEPT
    {
      inheritedConditionalRendering = inheritedConditionalRendering_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceConditionalRenderingFeaturesEXT*>( this );
    }

    explicit operator VkPhysicalDeviceConditionalRenderingFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceConditionalRenderingFeaturesEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, conditionalRendering, inheritedConditionalRendering );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceConditionalRenderingFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( conditionalRendering == rhs.conditionalRendering )
          && ( inheritedConditionalRendering == rhs.inheritedConditionalRendering );
#endif
    }

    bool operator!=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering = {};
    VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT ) == sizeof( VkPhysicalDeviceConditionalRenderingFeaturesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT>::value, "PhysicalDeviceConditionalRenderingFeaturesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT>
  {
    using Type = PhysicalDeviceConditionalRenderingFeaturesEXT;
  };
  struct PhysicalDeviceConservativeRasterizationPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceConservativeRasterizationPropertiesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT(float primitiveOverestimationSize_ = {}, float maxExtraPrimitiveOverestimationSize_ = {}, float extraPrimitiveOverestimationSizeGranularity_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_ = {}, VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_ = {}, VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_ = {}) VULKAN_HPP_NOEXCEPT
      : primitiveOverestimationSize( primitiveOverestimationSize_ ), maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ ), extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ ), primitiveUnderestimation( primitiveUnderestimation_ ), conservativePointAndLineRasterization( conservativePointAndLineRasterization_ ), degenerateTrianglesRasterized( degenerateTrianglesRasterized_ ), degenerateLinesRasterized( degenerateLinesRasterized_ ), fullyCoveredFragmentShaderInputVariable( fullyCoveredFragmentShaderInputVariable_ ), conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceConservativeRasterizationPropertiesEXT( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceConservativeRasterizationPropertiesEXT( *reinterpret_cast<PhysicalDeviceConservativeRasterizationPropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT const *>( &rhs );
      return *this;
    }

    explicit operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceConservativeRasterizationPropertiesEXT*>( this );
    }

    explicit operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceConservativeRasterizationPropertiesEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, float const &, float const &, float const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, primitiveOverestimationSize, maxExtraPrimitiveOverestimationSize, extraPrimitiveOverestimationSizeGranularity, primitiveUnderestimation, conservativePointAndLineRasterization, degenerateTrianglesRasterized, degenerateLinesRasterized, fullyCoveredFragmentShaderInputVariable, conservativeRasterizationPostDepthCoverage );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceConservativeRasterizationPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( primitiveOverestimationSize == rhs.primitiveOverestimationSize )
          && ( maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize )
          && ( extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity )
          && ( primitiveUnderestimation == rhs.primitiveUnderestimation )
          && ( conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization )
          && ( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized )
          && ( degenerateLinesRasterized == rhs.degenerateLinesRasterized )
          && ( fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable )
          && ( conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage );
#endif
    }

    bool operator!=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;
    void * pNext = {};
    float primitiveOverestimationSize = {};
    float maxExtraPrimitiveOverestimationSize = {};
    float extraPrimitiveOverestimationSizeGranularity = {};
    VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation = {};
    VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization = {};
    VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized = {};
    VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized = {};
    VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable = {};
    VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT>::value, "PhysicalDeviceConservativeRasterizationPropertiesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT>
  {
    using Type = PhysicalDeviceConservativeRasterizationPropertiesEXT;
  };
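  // Note (illustrative, not generated code): unlike the ...Features structs, this properties struct is read-only
  // output — it intentionally has no setters — and is filled in by chaining it into PhysicalDeviceProperties2.
  // A minimal sketch, assuming `physicalDevice` is a VULKAN_HPP_NAMESPACE::PhysicalDevice:
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT conservativeProps;
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2                            props2;
  //   props2.pNext = &conservativeProps;
  //   physicalDevice.getProperties2( &props2 );
  //   float granularity = conservativeProps.extraPrimitiveOverestimationSizeGranularity;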
  struct PhysicalDeviceCooperativeMatrixFeaturesNV
  {
    using NativeType = VkPhysicalDeviceCooperativeMatrixFeaturesNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {}, VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {}) VULKAN_HPP_NOEXCEPT
      : cooperativeMatrix( cooperativeMatrix_ ), cooperativeMatrixRobustBufferAccess( cooperativeMatrixRobustBufferAccess_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCooperativeMatrixFeaturesNV( *reinterpret_cast<PhysicalDeviceCooperativeMatrixFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrix( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT
    {
      cooperativeMatrix = cooperativeMatrix_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrixRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixFeaturesNV*>( this );
    }

    explicit operator VkPhysicalDeviceCooperativeMatrixFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixFeaturesNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, cooperativeMatrix, cooperativeMatrixRobustBufferAccess );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( cooperativeMatrix == rhs.cooperativeMatrix )
          && ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess );
#endif
    }

    bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix = {};
    VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV>::value, "PhysicalDeviceCooperativeMatrixFeaturesNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV>
  {
    using Type = PhysicalDeviceCooperativeMatrixFeaturesNV;
  };
  struct PhysicalDeviceCooperativeMatrixPropertiesNV
  {
    using NativeType = VkPhysicalDeviceCooperativeMatrixPropertiesNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV(VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {}) VULKAN_HPP_NOEXCEPT
      : cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCooperativeMatrixPropertiesNV( *reinterpret_cast<PhysicalDeviceCooperativeMatrixPropertiesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV const *>( &rhs );
      return *this;
    }

    explicit operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixPropertiesNV*>( this );
    }

    explicit operator VkPhysicalDeviceCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, cooperativeMatrixSupportedStages );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages );
#endif
    }

    bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV>::value, "PhysicalDeviceCooperativeMatrixPropertiesNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV>
  {
    using Type = PhysicalDeviceCooperativeMatrixPropertiesNV;
  };
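  // Illustrative check (not generated code): cooperativeMatrixSupportedStages is a ShaderStageFlags bitmask, so a
  // particular stage is tested with the usual flag operators. Assuming `coopMatrixProps` is an instance of the
  // struct above that was filled in through a PhysicalDeviceProperties2 pNext chain:
  //
  //   bool usableInCompute = static_cast<bool>( coopMatrixProps.cooperativeMatrixSupportedStages &
  //                                             VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eCompute );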
  struct PhysicalDeviceCornerSampledImageFeaturesNV
  {
    using NativeType = VkPhysicalDeviceCornerSampledImageFeaturesNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = {}) VULKAN_HPP_NOEXCEPT
      : cornerSampledImage( cornerSampledImage_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCornerSampledImageFeaturesNV( *reinterpret_cast<PhysicalDeviceCornerSampledImageFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceCornerSampledImageFeaturesNV & operator=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCornerSampledImageFeaturesNV & operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV & setCornerSampledImage( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ ) VULKAN_HPP_NOEXCEPT
    {
      cornerSampledImage = cornerSampledImage_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceCornerSampledImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCornerSampledImageFeaturesNV*>( this );
    }

    explicit operator VkPhysicalDeviceCornerSampledImageFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCornerSampledImageFeaturesNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, cornerSampledImage );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceCornerSampledImageFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( cornerSampledImage == rhs.cornerSampledImage );
#endif
    }

    bool operator!=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV ) == sizeof( VkPhysicalDeviceCornerSampledImageFeaturesNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV>::value, "PhysicalDeviceCornerSampledImageFeaturesNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV>
  {
    using Type = PhysicalDeviceCornerSampledImageFeaturesNV;
  };
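  // Note (illustrative): these wrappers compare member-wise. With VULKAN_HPP_HAS_SPACESHIP_OPERATOR the defaulted
  // operator<=> is used; otherwise the hand-written operator== above applies, optionally going through reflect()
  // when VULKAN_HPP_USE_REFLECT is defined. For example:
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV a, b;
  //   b.cornerSampledImage = VK_TRUE;
  //   assert( a != b );   // differs only in cornerSampledImage; sType and pNext compare equal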
  struct PhysicalDeviceCoverageReductionModeFeaturesNV
  {
    using NativeType = VkPhysicalDeviceCoverageReductionModeFeaturesNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = {}) VULKAN_HPP_NOEXCEPT
      : coverageReductionMode( coverageReductionMode_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCoverageReductionModeFeaturesNV( *reinterpret_cast<PhysicalDeviceCoverageReductionModeFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV & setCoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
    {
      coverageReductionMode = coverageReductionMode_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCoverageReductionModeFeaturesNV*>( this );
    }

    explicit operator VkPhysicalDeviceCoverageReductionModeFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCoverageReductionModeFeaturesNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, coverageReductionMode );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceCoverageReductionModeFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( coverageReductionMode == rhs.coverageReductionMode );
#endif
    }

    bool operator!=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV ) == sizeof( VkPhysicalDeviceCoverageReductionModeFeaturesNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV>::value, "PhysicalDeviceCoverageReductionModeFeaturesNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV>
  {
    using Type = PhysicalDeviceCoverageReductionModeFeaturesNV;
  };
  struct PhysicalDeviceCustomBorderColorFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceCustomBorderColorFeaturesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ = {}, VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ = {}) VULKAN_HPP_NOEXCEPT
      : customBorderColors( customBorderColors_ ), customBorderColorWithoutFormat( customBorderColorWithoutFormat_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCustomBorderColorFeaturesEXT( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCustomBorderColorFeaturesEXT( *reinterpret_cast<PhysicalDeviceCustomBorderColorFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & setCustomBorderColors( VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ ) VULKAN_HPP_NOEXCEPT
    {
      customBorderColors = customBorderColors_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & setCustomBorderColorWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      customBorderColorWithoutFormat = customBorderColorWithoutFormat_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceCustomBorderColorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCustomBorderColorFeaturesEXT*>( this );
    }

    explicit operator VkPhysicalDeviceCustomBorderColorFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCustomBorderColorFeaturesEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, customBorderColors, customBorderColorWithoutFormat );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceCustomBorderColorFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( customBorderColors == rhs.customBorderColors )
          && ( customBorderColorWithoutFormat == rhs.customBorderColorWithoutFormat );
#endif
    }

    bool operator!=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 customBorderColors = {};
    VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT ) == sizeof( VkPhysicalDeviceCustomBorderColorFeaturesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT>::value, "PhysicalDeviceCustomBorderColorFeaturesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT>
  {
    using Type = PhysicalDeviceCustomBorderColorFeaturesEXT;
  };
  struct PhysicalDeviceCustomBorderColorPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceCustomBorderColorPropertiesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT(uint32_t maxCustomBorderColorSamplers_ = {}) VULKAN_HPP_NOEXCEPT
      : maxCustomBorderColorSamplers( maxCustomBorderColorSamplers_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCustomBorderColorPropertiesEXT( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCustomBorderColorPropertiesEXT( *reinterpret_cast<PhysicalDeviceCustomBorderColorPropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT const *>( &rhs );
      return *this;
    }

    explicit operator VkPhysicalDeviceCustomBorderColorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCustomBorderColorPropertiesEXT*>( this );
    }

    explicit operator VkPhysicalDeviceCustomBorderColorPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCustomBorderColorPropertiesEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxCustomBorderColorSamplers );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceCustomBorderColorPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( maxCustomBorderColorSamplers == rhs.maxCustomBorderColorSamplers );
#endif
    }

    bool operator!=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT;
    void * pNext = {};
    uint32_t maxCustomBorderColorSamplers = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT ) == sizeof( VkPhysicalDeviceCustomBorderColorPropertiesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT>::value, "PhysicalDeviceCustomBorderColorPropertiesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT>
  {
    using Type = PhysicalDeviceCustomBorderColorPropertiesEXT;
  };
  struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV
  {
    using NativeType = VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = {}) VULKAN_HPP_NOEXCEPT
      : dedicatedAllocationImageAliasing( dedicatedAllocationImageAliasing_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( *reinterpret_cast<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & operator=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setDedicatedAllocationImageAliasing( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ ) VULKAN_HPP_NOEXCEPT
    {
      dedicatedAllocationImageAliasing = dedicatedAllocationImageAliasing_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>( this );
    }

    explicit operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, dedicatedAllocationImageAliasing );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( dedicatedAllocationImageAliasing == rhs.dedicatedAllocationImageAliasing );
#endif
    }

    bool operator!=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) == sizeof( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>::value, "PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>
  {
    using Type = PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
  };
struct PhysicalDeviceDepthClipControlFeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceDepthClipControlFeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 depthClipControl_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: depthClipControl( depthClipControl_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceDepthClipControlFeaturesEXT( VkPhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceDepthClipControlFeaturesEXT( *reinterpret_cast<PhysicalDeviceDepthClipControlFeaturesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceDepthClipControlFeaturesEXT & operator=( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceDepthClipControlFeaturesEXT & operator=( VkPhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipControlFeaturesEXT & setDepthClipControl( VULKAN_HPP_NAMESPACE::Bool32 depthClipControl_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
depthClipControl = depthClipControl_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceDepthClipControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceDepthClipControlFeaturesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceDepthClipControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceDepthClipControlFeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, depthClipControl );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceDepthClipControlFeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( depthClipControl == rhs.depthClipControl );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 depthClipControl = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClipControlFeaturesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT>::value, "PhysicalDeviceDepthClipControlFeaturesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT>
|
|
{
|
|
using Type = PhysicalDeviceDepthClipControlFeaturesEXT;
|
|
};
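
  // Illustrative usage sketch (not part of the generated API): enabling depthClipControl at
  // device creation. Assumes the default `vk` namespace alias; `queueCreateInfos`,
  // `enabledExtensions` and `physicalDevice` are hypothetical names. Feature structs double as
  // enable-structs by chaining them into vk::DeviceCreateInfo::pNext:
  //
  //   vk::PhysicalDeviceDepthClipControlFeaturesEXT depthClipControlFeatures{ VK_TRUE };
  //   vk::DeviceCreateInfo deviceCreateInfo{};
  //   deviceCreateInfo.setQueueCreateInfos( queueCreateInfos )
  //                   .setPEnabledExtensionNames( enabledExtensions )
  //                   .setPNext( &depthClipControlFeatures );
  //   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );
  //
  // VK_EXT_depth_clip_control must be in the enabled extension list for the feature to take effect.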

  struct PhysicalDeviceDepthClipEnableFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceDepthClipEnableFeaturesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {}) VULKAN_HPP_NOEXCEPT
    : depthClipEnable( depthClipEnable_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDepthClipEnableFeaturesEXT( *reinterpret_cast<PhysicalDeviceDepthClipEnableFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT & setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      depthClipEnable = depthClipEnable_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/


    explicit operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDepthClipEnableFeaturesEXT*>( this );
    }

    explicit operator VkPhysicalDeviceDepthClipEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDepthClipEnableFeaturesEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, depthClipEnable );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceDepthClipEnableFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( depthClipEnable == rhs.depthClipEnable );
#endif
    }

    bool operator!=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClipEnableFeaturesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT>::value, "PhysicalDeviceDepthClipEnableFeaturesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT>
  {
    using Type = PhysicalDeviceDepthClipEnableFeaturesEXT;
  };

  struct PhysicalDeviceDepthStencilResolveProperties
  {
    using NativeType = VkPhysicalDeviceDepthStencilResolveProperties;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthStencilResolveProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties(VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}) VULKAN_HPP_NOEXCEPT
    : supportedDepthResolveModes( supportedDepthResolveModes_ ), supportedStencilResolveModes( supportedStencilResolveModes_ ), independentResolveNone( independentResolveNone_ ), independentResolve( independentResolve_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDepthStencilResolveProperties( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDepthStencilResolveProperties( *reinterpret_cast<PhysicalDeviceDepthStencilResolveProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceDepthStencilResolveProperties & operator=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDepthStencilResolveProperties & operator=( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties const *>( &rhs );
      return *this;
    }


    explicit operator VkPhysicalDeviceDepthStencilResolveProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDepthStencilResolveProperties*>( this );
    }

    explicit operator VkPhysicalDeviceDepthStencilResolveProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDepthStencilResolveProperties*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ResolveModeFlags const &, VULKAN_HPP_NAMESPACE::ResolveModeFlags const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, supportedDepthResolveModes, supportedStencilResolveModes, independentResolveNone, independentResolve );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceDepthStencilResolveProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( supportedDepthResolveModes == rhs.supportedDepthResolveModes )
          && ( supportedStencilResolveModes == rhs.supportedStencilResolveModes )
          && ( independentResolveNone == rhs.independentResolveNone )
          && ( independentResolve == rhs.independentResolve );
#endif
    }

    bool operator!=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthStencilResolveProperties;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {};
    VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {};
    VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {};
    VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties ) == sizeof( VkPhysicalDeviceDepthStencilResolveProperties ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties>::value, "PhysicalDeviceDepthStencilResolveProperties is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDepthStencilResolveProperties>
  {
    using Type = PhysicalDeviceDepthStencilResolveProperties;
  };
  using PhysicalDeviceDepthStencilResolvePropertiesKHR = PhysicalDeviceDepthStencilResolveProperties;
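
  // Illustrative usage sketch (not part of the generated API): reading the depth/stencil resolve
  // properties. Assumes the default `vk` namespace alias and a hypothetical vk::PhysicalDevice
  // handle named `physicalDevice`. Properties structs are filled by chaining them behind
  // vk::PhysicalDeviceProperties2:
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceDepthStencilResolveProperties>();
  //   auto const & resolveProps = chain.get<vk::PhysicalDeviceDepthStencilResolveProperties>();
  //   bool canAverageDepth =
  //     static_cast<bool>( resolveProps.supportedDepthResolveModes & vk::ResolveModeFlagBits::eAverage );
  //
  // Use the reported flags to pick the depthResolveMode/stencilResolveMode of a subpass resolve.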

  struct PhysicalDeviceDescriptorIndexingFeatures
  {
    using NativeType = VkPhysicalDeviceDescriptorIndexingFeatures;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}) VULKAN_HPP_NOEXCEPT
    : shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ ), shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ), shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ), shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ ), shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ ), shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ ), shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ ), shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ ), shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ ), shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ ), descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ ), descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ ), descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ ), descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ ), descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ ), descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ ), descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ ), descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ ), descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ ), runtimeDescriptorArray( runtimeDescriptorArray_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDescriptorIndexingFeatures( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDescriptorIndexingFeatures( *reinterpret_cast<PhysicalDeviceDescriptorIndexingFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceDescriptorIndexingFeatures & operator=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDescriptorIndexingFeatures & operator=( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
    {
      runtimeDescriptorArray = runtimeDescriptorArray_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/


    explicit operator VkPhysicalDeviceDescriptorIndexingFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingFeatures*>( this );
    }

    explicit operator VkPhysicalDeviceDescriptorIndexingFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingFeatures*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderInputAttachmentArrayDynamicIndexing, shaderUniformTexelBufferArrayDynamicIndexing, shaderStorageTexelBufferArrayDynamicIndexing, shaderUniformBufferArrayNonUniformIndexing, shaderSampledImageArrayNonUniformIndexing, shaderStorageBufferArrayNonUniformIndexing, shaderStorageImageArrayNonUniformIndexing, shaderInputAttachmentArrayNonUniformIndexing, shaderUniformTexelBufferArrayNonUniformIndexing, shaderStorageTexelBufferArrayNonUniformIndexing, descriptorBindingUniformBufferUpdateAfterBind, descriptorBindingSampledImageUpdateAfterBind, descriptorBindingStorageImageUpdateAfterBind, descriptorBindingStorageBufferUpdateAfterBind, descriptorBindingUniformTexelBufferUpdateAfterBind, descriptorBindingStorageTexelBufferUpdateAfterBind, descriptorBindingUpdateUnusedWhilePending, descriptorBindingPartiallyBound, descriptorBindingVariableDescriptorCount, runtimeDescriptorArray );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceDescriptorIndexingFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing )
          && ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing )
          && ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing )
          && ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing )
          && ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing )
          && ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing )
          && ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing )
          && ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing )
          && ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing )
          && ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing )
          && ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind )
          && ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind )
          && ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind )
          && ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind )
          && ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind )
          && ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind )
          && ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending )
          && ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound )
          && ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount )
          && ( runtimeDescriptorArray == rhs.runtimeDescriptorArray );
#endif
    }

    bool operator!=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {};
    VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures ) == sizeof( VkPhysicalDeviceDescriptorIndexingFeatures ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures>::value, "PhysicalDeviceDescriptorIndexingFeatures is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorIndexingFeatures>
  {
    using Type = PhysicalDeviceDescriptorIndexingFeatures;
  };
  using PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures;
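
  // Illustrative usage sketch (not part of the generated API): requesting a subset of the
  // descriptor-indexing features. Assumes the default `vk` namespace alias; `queueCreateInfos`
  // is a hypothetical name. The generated setters return *this, so requests chain fluently and
  // the struct can then be hooked into vk::DeviceCreateInfo::pNext (or a vk::StructureChain):
  //
  //   vk::PhysicalDeviceDescriptorIndexingFeatures indexingFeatures{};
  //   indexingFeatures.setRuntimeDescriptorArray( VK_TRUE )
  //                   .setDescriptorBindingPartiallyBound( VK_TRUE )
  //                   .setDescriptorBindingVariableDescriptorCount( VK_TRUE );
  //   vk::DeviceCreateInfo deviceCreateInfo{};
  //   deviceCreateInfo.setQueueCreateInfos( queueCreateInfos ).setPNext( &indexingFeatures );
  //
  // Only request features that getFeatures2 previously reported as supported.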

  struct PhysicalDeviceDescriptorIndexingProperties
  {
    using NativeType = VkPhysicalDeviceDescriptorIndexingProperties;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties(uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, uint32_t maxPerStageUpdateAfterBindResources_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}) VULKAN_HPP_NOEXCEPT
    : maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ), shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ), shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ), shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ), shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ), shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ), robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ), quadDivergentImplicitLod( quadDivergentImplicitLod_ ), maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ ), maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ), maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ), maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ), maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ), maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ ), maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ), maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ), maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ), maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ), maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ), maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ), maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ), maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ), maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDescriptorIndexingProperties( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDescriptorIndexingProperties( *reinterpret_cast<PhysicalDeviceDescriptorIndexingProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceDescriptorIndexingProperties & operator=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDescriptorIndexingProperties & operator=( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties const *>( &rhs );
      return *this;
    }


    explicit operator VkPhysicalDeviceDescriptorIndexingProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingProperties*>( this );
    }

    explicit operator VkPhysicalDeviceDescriptorIndexingProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingProperties*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxUpdateAfterBindDescriptorsInAllPools, shaderUniformBufferArrayNonUniformIndexingNative, shaderSampledImageArrayNonUniformIndexingNative, shaderStorageBufferArrayNonUniformIndexingNative, shaderStorageImageArrayNonUniformIndexingNative, shaderInputAttachmentArrayNonUniformIndexingNative, robustBufferAccessUpdateAfterBind, quadDivergentImplicitLod, maxPerStageDescriptorUpdateAfterBindSamplers, maxPerStageDescriptorUpdateAfterBindUniformBuffers, maxPerStageDescriptorUpdateAfterBindStorageBuffers, maxPerStageDescriptorUpdateAfterBindSampledImages, maxPerStageDescriptorUpdateAfterBindStorageImages, maxPerStageDescriptorUpdateAfterBindInputAttachments, maxPerStageUpdateAfterBindResources, maxDescriptorSetUpdateAfterBindSamplers, maxDescriptorSetUpdateAfterBindUniformBuffers, maxDescriptorSetUpdateAfterBindUniformBuffersDynamic, maxDescriptorSetUpdateAfterBindStorageBuffers, maxDescriptorSetUpdateAfterBindStorageBuffersDynamic, maxDescriptorSetUpdateAfterBindSampledImages, maxDescriptorSetUpdateAfterBindStorageImages, maxDescriptorSetUpdateAfterBindInputAttachments );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceDescriptorIndexingProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools )
          && ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative )
          && ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative )
          && ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative )
          && ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative )
          && ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative )
          && ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind )
          && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod )
          && ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers )
          && ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers )
          && ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers )
          && ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages )
          && ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages )
          && ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments )
          && ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources )
          && ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers )
          && ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers )
          && ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic )
          && ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers )
          && ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic )
          && ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages )
          && ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages )
          && ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments );
#endif
    }

    bool operator!=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingProperties;
    void * pNext = {};
    uint32_t maxUpdateAfterBindDescriptorsInAllPools = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {};
    VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {};
    uint32_t maxPerStageUpdateAfterBindResources = {};
    uint32_t maxDescriptorSetUpdateAfterBindSamplers = {};
    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {};
    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {};
    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {};
    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {};
    uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {};
    uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {};
    uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties ) == sizeof( VkPhysicalDeviceDescriptorIndexingProperties ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties>::value, "PhysicalDeviceDescriptorIndexingProperties is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorIndexingProperties>
  {
    using Type = PhysicalDeviceDescriptorIndexingProperties;
  };
  using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties;
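
  // Illustrative usage sketch (not part of the generated API): checking update-after-bind limits.
  // Assumes the default `vk` namespace alias and a hypothetical physicalDevice handle.
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceDescriptorIndexingProperties>();
  //   auto const & indexingProps = chain.get<vk::PhysicalDeviceDescriptorIndexingProperties>();
  //   uint32_t maxBindlessSampledImages =
  //     indexingProps.maxDescriptorSetUpdateAfterBindSampledImages;  // size bindless arrays against this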

  struct PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE
  {
    using NativeType = VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE(VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping_ = {}) VULKAN_HPP_NOEXCEPT
    : descriptorSetHostMapping( descriptorSetHostMapping_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( *reinterpret_cast<PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & operator=( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & operator=( VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & setDescriptorSetHostMapping( VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorSetHostMapping = descriptorSetHostMapping_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/


    explicit operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE*>( this );
    }

    explicit operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, descriptorSetHostMapping );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & ) const = default;
#else
    bool operator==( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( descriptorSetHostMapping == rhs.descriptorSetHostMapping );
#endif
    }

    bool operator!=( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE ) == sizeof( VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE>::value, "PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE>
  {
    using Type = PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
  };

  struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV
  {
    using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ = {}) VULKAN_HPP_NOEXCEPT
    : deviceGeneratedCommands( deviceGeneratedCommands_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setDeviceGeneratedCommands( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceGeneratedCommands = deviceGeneratedCommands_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/


    explicit operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV*>( this );
    }

    explicit operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, deviceGeneratedCommands );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( deviceGeneratedCommands == rhs.deviceGeneratedCommands );
#endif
    }

    bool operator!=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV ) == sizeof( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>::value, "PhysicalDeviceDeviceGeneratedCommandsFeaturesNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV>
  {
    using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
  };
|
|
|
|
  struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV
  {
    using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV(uint32_t maxGraphicsShaderGroupCount_ = {}, uint32_t maxIndirectSequenceCount_ = {}, uint32_t maxIndirectCommandsTokenCount_ = {}, uint32_t maxIndirectCommandsStreamCount_ = {}, uint32_t maxIndirectCommandsTokenOffset_ = {}, uint32_t maxIndirectCommandsStreamStride_ = {}, uint32_t minSequencesCountBufferOffsetAlignment_ = {}, uint32_t minSequencesIndexBufferOffsetAlignment_ = {}, uint32_t minIndirectCommandsBufferOffsetAlignment_ = {}) VULKAN_HPP_NOEXCEPT
      : maxGraphicsShaderGroupCount( maxGraphicsShaderGroupCount_ ), maxIndirectSequenceCount( maxIndirectSequenceCount_ ), maxIndirectCommandsTokenCount( maxIndirectCommandsTokenCount_ ), maxIndirectCommandsStreamCount( maxIndirectCommandsStreamCount_ ), maxIndirectCommandsTokenOffset( maxIndirectCommandsTokenOffset_ ), maxIndirectCommandsStreamStride( maxIndirectCommandsStreamStride_ ), minSequencesCountBufferOffsetAlignment( minSequencesCountBufferOffsetAlignment_ ), minSequencesIndexBufferOffsetAlignment( minSequencesIndexBufferOffsetAlignment_ ), minIndirectCommandsBufferOffsetAlignment( minIndirectCommandsBufferOffsetAlignment_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const *>( &rhs );
      return *this;
    }

    explicit operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV*>( this );
    }

    explicit operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxGraphicsShaderGroupCount, maxIndirectSequenceCount, maxIndirectCommandsTokenCount, maxIndirectCommandsStreamCount, maxIndirectCommandsTokenOffset, maxIndirectCommandsStreamStride, minSequencesCountBufferOffsetAlignment, minSequencesIndexBufferOffsetAlignment, minIndirectCommandsBufferOffsetAlignment );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( maxGraphicsShaderGroupCount == rhs.maxGraphicsShaderGroupCount )
          && ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount )
          && ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount )
          && ( maxIndirectCommandsStreamCount == rhs.maxIndirectCommandsStreamCount )
          && ( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset )
          && ( maxIndirectCommandsStreamStride == rhs.maxIndirectCommandsStreamStride )
          && ( minSequencesCountBufferOffsetAlignment == rhs.minSequencesCountBufferOffsetAlignment )
          && ( minSequencesIndexBufferOffsetAlignment == rhs.minSequencesIndexBufferOffsetAlignment )
          && ( minIndirectCommandsBufferOffsetAlignment == rhs.minIndirectCommandsBufferOffsetAlignment );
#endif
    }

    bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
    void * pNext = {};
    uint32_t maxGraphicsShaderGroupCount = {};
    uint32_t maxIndirectSequenceCount = {};
    uint32_t maxIndirectCommandsTokenCount = {};
    uint32_t maxIndirectCommandsStreamCount = {};
    uint32_t maxIndirectCommandsTokenOffset = {};
    uint32_t maxIndirectCommandsStreamStride = {};
    uint32_t minSequencesCountBufferOffsetAlignment = {};
    uint32_t minSequencesIndexBufferOffsetAlignment = {};
    uint32_t minIndirectCommandsBufferOffsetAlignment = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV ) == sizeof( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV>::value, "PhysicalDeviceDeviceGeneratedCommandsPropertiesNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV>
  {
    using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
  };

  struct PhysicalDeviceDeviceMemoryReportFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceDeviceMemoryReportFeaturesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ = {}) VULKAN_HPP_NOEXCEPT
      : deviceMemoryReport( deviceMemoryReport_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDeviceMemoryReportFeaturesEXT( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDeviceMemoryReportFeaturesEXT( *reinterpret_cast<PhysicalDeviceDeviceMemoryReportFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceDeviceMemoryReportFeaturesEXT & operator=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDeviceMemoryReportFeaturesEXT & operator=( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT & setDeviceMemoryReport( VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceMemoryReport = deviceMemoryReport_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDeviceMemoryReportFeaturesEXT*>( this );
    }

    explicit operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDeviceMemoryReportFeaturesEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, deviceMemoryReport );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( deviceMemoryReport == rhs.deviceMemoryReport );
#endif
    }

    bool operator!=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT ) == sizeof( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT>::value, "PhysicalDeviceDeviceMemoryReportFeaturesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT>
  {
    using Type = PhysicalDeviceDeviceMemoryReportFeaturesEXT;
  };

  struct PhysicalDeviceDiagnosticsConfigFeaturesNV
  {
    using NativeType = VkPhysicalDeviceDiagnosticsConfigFeaturesNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ = {}) VULKAN_HPP_NOEXCEPT
      : diagnosticsConfig( diagnosticsConfig_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDiagnosticsConfigFeaturesNV( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDiagnosticsConfigFeaturesNV( *reinterpret_cast<PhysicalDeviceDiagnosticsConfigFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV & setDiagnosticsConfig( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ ) VULKAN_HPP_NOEXCEPT
    {
      diagnosticsConfig = diagnosticsConfig_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDiagnosticsConfigFeaturesNV*>( this );
    }

    explicit operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDiagnosticsConfigFeaturesNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, diagnosticsConfig );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceDiagnosticsConfigFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( diagnosticsConfig == rhs.diagnosticsConfig );
#endif
    }

    bool operator!=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV ) == sizeof( VkPhysicalDeviceDiagnosticsConfigFeaturesNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV>::value, "PhysicalDeviceDiagnosticsConfigFeaturesNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV>
  {
    using Type = PhysicalDeviceDiagnosticsConfigFeaturesNV;
  };

  struct PhysicalDeviceDiscardRectanglePropertiesEXT
  {
    using NativeType = VkPhysicalDeviceDiscardRectanglePropertiesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT(uint32_t maxDiscardRectangles_ = {}) VULKAN_HPP_NOEXCEPT
      : maxDiscardRectangles( maxDiscardRectangles_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDiscardRectanglePropertiesEXT( *reinterpret_cast<PhysicalDeviceDiscardRectanglePropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT const *>( &rhs );
      return *this;
    }

    explicit operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDiscardRectanglePropertiesEXT*>( this );
    }

    explicit operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDiscardRectanglePropertiesEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxDiscardRectangles );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceDiscardRectanglePropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( maxDiscardRectangles == rhs.maxDiscardRectangles );
#endif
    }

    bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
    void * pNext = {};
    uint32_t maxDiscardRectangles = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT ) == sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT>::value, "PhysicalDeviceDiscardRectanglePropertiesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT>
  {
    using Type = PhysicalDeviceDiscardRectanglePropertiesEXT;
  };

  struct PhysicalDeviceDriverProperties
  {
    using NativeType = VkPhysicalDeviceDriverProperties;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDriverProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties(VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, std::array<char,VK_MAX_DRIVER_NAME_SIZE> const & driverName_ = {}, std::array<char,VK_MAX_DRIVER_INFO_SIZE> const & driverInfo_ = {}, VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}) VULKAN_HPP_NOEXCEPT
      : driverID( driverID_ ), driverName( driverName_ ), driverInfo( driverInfo_ ), conformanceVersion( conformanceVersion_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDriverProperties( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDriverProperties( *reinterpret_cast<PhysicalDeviceDriverProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceDriverProperties & operator=( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDriverProperties & operator=( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties const *>( &rhs );
      return *this;
    }

    explicit operator VkPhysicalDeviceDriverProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDriverProperties*>( this );
    }

    explicit operator VkPhysicalDeviceDriverProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDriverProperties*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DriverId const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> const &, VULKAN_HPP_NAMESPACE::ConformanceVersion const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, driverID, driverName, driverInfo, conformanceVersion );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceDriverProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( driverID == rhs.driverID )
          && ( driverName == rhs.driverName )
          && ( driverInfo == rhs.driverInfo )
          && ( conformanceVersion == rhs.conformanceVersion );
#endif
    }

    bool operator!=( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverProperties;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> driverName = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> driverInfo = {};
    VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties ) == sizeof( VkPhysicalDeviceDriverProperties ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties>::value, "PhysicalDeviceDriverProperties is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDriverProperties>
  {
    using Type = PhysicalDeviceDriverProperties;
  };
  using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties;

struct PhysicalDeviceDrmPropertiesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceDrmPropertiesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDrmPropertiesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceDrmPropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 hasPrimary_ = {}, VULKAN_HPP_NAMESPACE::Bool32 hasRender_ = {}, int64_t primaryMajor_ = {}, int64_t primaryMinor_ = {}, int64_t renderMajor_ = {}, int64_t renderMinor_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: hasPrimary( hasPrimary_ ), hasRender( hasRender_ ), primaryMajor( primaryMajor_ ), primaryMinor( primaryMinor_ ), renderMajor( renderMajor_ ), renderMinor( renderMinor_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceDrmPropertiesEXT( PhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceDrmPropertiesEXT( VkPhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceDrmPropertiesEXT( *reinterpret_cast<PhysicalDeviceDrmPropertiesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceDrmPropertiesEXT & operator=( PhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceDrmPropertiesEXT & operator=( VkPhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceDrmPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceDrmPropertiesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceDrmPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceDrmPropertiesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, int64_t const &, int64_t const &, int64_t const &, int64_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, hasPrimary, hasRender, primaryMajor, primaryMinor, renderMajor, renderMinor );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceDrmPropertiesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceDrmPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( hasPrimary == rhs.hasPrimary )
|
|
&& ( hasRender == rhs.hasRender )
|
|
&& ( primaryMajor == rhs.primaryMajor )
|
|
&& ( primaryMinor == rhs.primaryMinor )
|
|
&& ( renderMajor == rhs.renderMajor )
|
|
&& ( renderMinor == rhs.renderMinor );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceDrmPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDrmPropertiesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 hasPrimary = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 hasRender = {};
|
|
int64_t primaryMajor = {};
|
|
int64_t primaryMinor = {};
|
|
int64_t renderMajor = {};
|
|
int64_t renderMinor = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT ) == sizeof( VkPhysicalDeviceDrmPropertiesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT>::value, "PhysicalDeviceDrmPropertiesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceDrmPropertiesEXT>
|
|
{
|
|
using Type = PhysicalDeviceDrmPropertiesEXT;
|
|
};
|
|
|
|
struct PhysicalDeviceDynamicRenderingFeatures
|
|
{
|
|
using NativeType = VkPhysicalDeviceDynamicRenderingFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDynamicRenderingFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingFeatures(VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: dynamicRendering( dynamicRendering_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingFeatures( PhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceDynamicRenderingFeatures( VkPhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceDynamicRenderingFeatures( *reinterpret_cast<PhysicalDeviceDynamicRenderingFeatures const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceDynamicRenderingFeatures & operator=( PhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceDynamicRenderingFeatures & operator=( VkPhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingFeatures & setDynamicRendering( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dynamicRendering = dynamicRendering_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceDynamicRenderingFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceDynamicRenderingFeatures*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceDynamicRenderingFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceDynamicRenderingFeatures*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, dynamicRendering );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceDynamicRenderingFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceDynamicRenderingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( dynamicRendering == rhs.dynamicRendering );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceDynamicRenderingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDynamicRenderingFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures ) == sizeof( VkPhysicalDeviceDynamicRenderingFeatures ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures>::value, "PhysicalDeviceDynamicRenderingFeatures is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceDynamicRenderingFeatures>
|
|
{
|
|
using Type = PhysicalDeviceDynamicRenderingFeatures;
|
|
};
|
|
using PhysicalDeviceDynamicRenderingFeaturesKHR = PhysicalDeviceDynamicRenderingFeatures;
|
|
|
|
struct PhysicalDeviceExclusiveScissorFeaturesNV
|
|
{
|
|
using NativeType = VkPhysicalDeviceExclusiveScissorFeaturesNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: exclusiveScissor( exclusiveScissor_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceExclusiveScissorFeaturesNV( *reinterpret_cast<PhysicalDeviceExclusiveScissorFeaturesNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceExclusiveScissorFeaturesNV & operator=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExclusiveScissorFeaturesNV & operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV & setExclusiveScissor( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
exclusiveScissor = exclusiveScissor_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceExclusiveScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceExclusiveScissorFeaturesNV*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceExclusiveScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceExclusiveScissorFeaturesNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, exclusiveScissor );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceExclusiveScissorFeaturesNV const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( exclusiveScissor == rhs.exclusiveScissor );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV ) == sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV>::value, "PhysicalDeviceExclusiveScissorFeaturesNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV>
|
|
{
|
|
using Type = PhysicalDeviceExclusiveScissorFeaturesNV;
|
|
};
|
|
|
|
struct PhysicalDeviceExtendedDynamicState2FeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceExtendedDynamicState2FeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: extendedDynamicState2( extendedDynamicState2_ ), extendedDynamicState2LogicOp( extendedDynamicState2LogicOp_ ), extendedDynamicState2PatchControlPoints( extendedDynamicState2PatchControlPoints_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExtendedDynamicState2FeaturesEXT( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceExtendedDynamicState2FeaturesEXT( *reinterpret_cast<PhysicalDeviceExtendedDynamicState2FeaturesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceExtendedDynamicState2FeaturesEXT & operator=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExtendedDynamicState2FeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setExtendedDynamicState2( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
extendedDynamicState2 = extendedDynamicState2_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setExtendedDynamicState2LogicOp( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
extendedDynamicState2LogicOp = extendedDynamicState2LogicOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setExtendedDynamicState2PatchControlPoints( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
extendedDynamicState2PatchControlPoints = extendedDynamicState2PatchControlPoints_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicState2FeaturesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicState2FeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, extendedDynamicState2, extendedDynamicState2LogicOp, extendedDynamicState2PatchControlPoints );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( extendedDynamicState2 == rhs.extendedDynamicState2 )
|
|
&& ( extendedDynamicState2LogicOp == rhs.extendedDynamicState2LogicOp )
|
|
&& ( extendedDynamicState2PatchControlPoints == rhs.extendedDynamicState2PatchControlPoints );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT ) == sizeof( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT>::value, "PhysicalDeviceExtendedDynamicState2FeaturesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT>
|
|
{
|
|
using Type = PhysicalDeviceExtendedDynamicState2FeaturesEXT;
|
|
};
|
|
|
|
struct PhysicalDeviceExtendedDynamicStateFeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceExtendedDynamicStateFeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: extendedDynamicState( extendedDynamicState_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExtendedDynamicStateFeaturesEXT( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceExtendedDynamicStateFeaturesEXT( *reinterpret_cast<PhysicalDeviceExtendedDynamicStateFeaturesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT & setExtendedDynamicState( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
extendedDynamicState = extendedDynamicState_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicStateFeaturesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, extendedDynamicState );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( extendedDynamicState == rhs.extendedDynamicState );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT ) == sizeof( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT>::value, "PhysicalDeviceExtendedDynamicStateFeaturesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT>
|
|
{
|
|
using Type = PhysicalDeviceExtendedDynamicStateFeaturesEXT;
|
|
};
|
|
|
|
struct PhysicalDeviceExternalBufferInfo
|
|
{
|
|
using NativeType = VkPhysicalDeviceExternalBufferInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalBufferInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo(VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), usage( usage_ ), handleType( handleType_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceExternalBufferInfo( *reinterpret_cast<PhysicalDeviceExternalBufferInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceExternalBufferInfo & operator=( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExternalBufferInfo & operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
usage = usage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceExternalBufferInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceExternalBufferInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceExternalBufferInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferCreateFlags const &, VULKAN_HPP_NAMESPACE::BufferUsageFlags const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, usage, handleType );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceExternalBufferInfo const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( usage == rhs.usage )
|
|
&& ( handleType == rhs.handleType );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo>::value, "PhysicalDeviceExternalBufferInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalBufferInfo>
|
|
{
|
|
using Type = PhysicalDeviceExternalBufferInfo;
|
|
};
|
|
using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo;
|
|
|
|
struct PhysicalDeviceExternalFenceInfo
|
|
{
|
|
using NativeType = VkPhysicalDeviceExternalFenceInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFenceInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
|
|
: handleType( handleType_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceExternalFenceInfo( *reinterpret_cast<PhysicalDeviceExternalFenceInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceExternalFenceInfo & operator=( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExternalFenceInfo & operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceExternalFenceInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceExternalFenceInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, handleType );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceExternalFenceInfo const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( handleType == rhs.handleType );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo>::value, "PhysicalDeviceExternalFenceInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalFenceInfo>
|
|
{
|
|
using Type = PhysicalDeviceExternalFenceInfo;
|
|
};
|
|
using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo;
|
|
|
|
struct PhysicalDeviceExternalImageFormatInfo
|
|
{
|
|
using NativeType = VkPhysicalDeviceExternalImageFormatInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalImageFormatInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
|
|
: handleType( handleType_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceExternalImageFormatInfo( *reinterpret_cast<PhysicalDeviceExternalImageFormatInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceExternalImageFormatInfo & operator=( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExternalImageFormatInfo & operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceExternalImageFormatInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceExternalImageFormatInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceExternalImageFormatInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, handleType );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceExternalImageFormatInfo const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( handleType == rhs.handleType );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo>::value, "PhysicalDeviceExternalImageFormatInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalImageFormatInfo>
|
|
{
|
|
using Type = PhysicalDeviceExternalImageFormatInfo;
|
|
};
|
|
using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo;
|
|
|
|
struct PhysicalDeviceExternalMemoryHostPropertiesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceExternalMemoryHostPropertiesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT(VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: minImportedHostPointerAlignment( minImportedHostPointerAlignment_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceExternalMemoryHostPropertiesEXT( *reinterpret_cast<PhysicalDeviceExternalMemoryHostPropertiesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceExternalMemoryHostPropertiesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, minImportedHostPointerAlignment );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceExternalMemoryHostPropertiesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT ) == sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT>::value, "PhysicalDeviceExternalMemoryHostPropertiesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT>
|
|
{
|
|
using Type = PhysicalDeviceExternalMemoryHostPropertiesEXT;
|
|
};
|
|
|
|
struct PhysicalDeviceExternalMemoryRDMAFeaturesNV
|
|
{
|
|
using NativeType = VkPhysicalDeviceExternalMemoryRDMAFeaturesNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryRDMAFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: externalMemoryRDMA( externalMemoryRDMA_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryRDMAFeaturesNV( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExternalMemoryRDMAFeaturesNV( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceExternalMemoryRDMAFeaturesNV( *reinterpret_cast<PhysicalDeviceExternalMemoryRDMAFeaturesNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceExternalMemoryRDMAFeaturesNV & operator=( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExternalMemoryRDMAFeaturesNV & operator=( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryRDMAFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryRDMAFeaturesNV & setExternalMemoryRDMA( VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
externalMemoryRDMA = externalMemoryRDMA_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceExternalMemoryRDMAFeaturesNV*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceExternalMemoryRDMAFeaturesNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, externalMemoryRDMA );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( externalMemoryRDMA == rhs.externalMemoryRDMA );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV ) == sizeof( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV>::value, "PhysicalDeviceExternalMemoryRDMAFeaturesNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV>
|
|
{
|
|
using Type = PhysicalDeviceExternalMemoryRDMAFeaturesNV;
|
|
};
|
|
|
|
struct PhysicalDeviceExternalSemaphoreInfo
|
|
{
|
|
using NativeType = VkPhysicalDeviceExternalSemaphoreInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalSemaphoreInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
|
|
: handleType( handleType_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceExternalSemaphoreInfo( *reinterpret_cast<PhysicalDeviceExternalSemaphoreInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceExternalSemaphoreInfo & operator=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceExternalSemaphoreInfo & operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceExternalSemaphoreInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceExternalSemaphoreInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceExternalSemaphoreInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, handleType );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceExternalSemaphoreInfo const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( handleType == rhs.handleType );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo>::value, "PhysicalDeviceExternalSemaphoreInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalSemaphoreInfo>
|
|
{
|
|
using Type = PhysicalDeviceExternalSemaphoreInfo;
|
|
};
|
|
using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo;
|
|
|
|
struct PhysicalDeviceFeatures2
|
|
{
|
|
using NativeType = VkPhysicalDeviceFeatures2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFeatures2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2(VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: features( features_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceFeatures2( *reinterpret_cast<PhysicalDeviceFeatures2 const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceFeatures2 & operator=( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceFeatures2 & operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 & setFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const & features_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
features = features_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceFeatures2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceFeatures2*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceFeatures2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, features );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceFeatures2 const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( features == rhs.features );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFeatures2;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>::value, "PhysicalDeviceFeatures2 is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceFeatures2>
|
|
{
|
|
using Type = PhysicalDeviceFeatures2;
|
|
};
|
|
using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2;
|
|
|
|
struct PhysicalDeviceFloatControlsProperties
|
|
{
|
|
using NativeType = VkPhysicalDeviceFloatControlsProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFloatControlsProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties(VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: denormBehaviorIndependence( denormBehaviorIndependence_ ), roundingModeIndependence( roundingModeIndependence_ ), shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ), shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ), shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ), shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ), shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ), shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ), shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ), shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ), shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ), shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ), shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ), shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ), shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ), shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ), shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceFloatControlsProperties( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceFloatControlsProperties( *reinterpret_cast<PhysicalDeviceFloatControlsProperties const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceFloatControlsProperties & operator=( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceFloatControlsProperties & operator=( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceFloatControlsProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceFloatControlsProperties*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceFloatControlsProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceFloatControlsProperties*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, denormBehaviorIndependence, roundingModeIndependence, shaderSignedZeroInfNanPreserveFloat16, shaderSignedZeroInfNanPreserveFloat32, shaderSignedZeroInfNanPreserveFloat64, shaderDenormPreserveFloat16, shaderDenormPreserveFloat32, shaderDenormPreserveFloat64, shaderDenormFlushToZeroFloat16, shaderDenormFlushToZeroFloat32, shaderDenormFlushToZeroFloat64, shaderRoundingModeRTEFloat16, shaderRoundingModeRTEFloat32, shaderRoundingModeRTEFloat64, shaderRoundingModeRTZFloat16, shaderRoundingModeRTZFloat32, shaderRoundingModeRTZFloat64 );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceFloatControlsProperties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( denormBehaviorIndependence == rhs.denormBehaviorIndependence )
|
|
&& ( roundingModeIndependence == rhs.roundingModeIndependence )
|
|
&& ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 )
|
|
&& ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 )
|
|
&& ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 )
|
|
&& ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 )
|
|
&& ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 )
|
|
&& ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 )
|
|
&& ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 )
|
|
&& ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 )
|
|
&& ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 )
|
|
&& ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 )
|
|
&& ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 )
|
|
&& ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 )
|
|
&& ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 )
|
|
&& ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 )
|
|
&& ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFloatControlsProperties;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
|
|
VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties ) == sizeof( VkPhysicalDeviceFloatControlsProperties ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties>::value, "PhysicalDeviceFloatControlsProperties is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceFloatControlsProperties>
|
|
{
|
|
using Type = PhysicalDeviceFloatControlsProperties;
|
|
};
|
|
using PhysicalDeviceFloatControlsPropertiesKHR = PhysicalDeviceFloatControlsProperties;
|
|
|
|
struct PhysicalDeviceFragmentDensityMap2FeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceFragmentDensityMap2FeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: fragmentDensityMapDeferred( fragmentDensityMapDeferred_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceFragmentDensityMap2FeaturesEXT( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceFragmentDensityMap2FeaturesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMap2FeaturesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2FeaturesEXT & setFragmentDensityMapDeferred( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fragmentDensityMapDeferred = fragmentDensityMapDeferred_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMap2FeaturesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMap2FeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, fragmentDensityMapDeferred );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( fragmentDensityMapDeferred == rhs.fragmentDensityMapDeferred );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT>::value, "PhysicalDeviceFragmentDensityMap2FeaturesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT>
|
|
{
|
|
using Type = PhysicalDeviceFragmentDensityMap2FeaturesEXT;
|
|
};
|
|
|
|
struct PhysicalDeviceFragmentDensityMap2PropertiesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceFragmentDensityMap2PropertiesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess_ = {}, uint32_t maxSubsampledArrayLayers_ = {}, uint32_t maxDescriptorSetSubsampledSamplers_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: subsampledLoads( subsampledLoads_ ), subsampledCoarseReconstructionEarlyAccess( subsampledCoarseReconstructionEarlyAccess_ ), maxSubsampledArrayLayers( maxSubsampledArrayLayers_ ), maxDescriptorSetSubsampledSamplers( maxDescriptorSetSubsampledSamplers_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceFragmentDensityMap2PropertiesEXT( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceFragmentDensityMap2PropertiesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMap2PropertiesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMap2PropertiesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMap2PropertiesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, subsampledLoads, subsampledCoarseReconstructionEarlyAccess, maxSubsampledArrayLayers, maxDescriptorSetSubsampledSamplers );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( subsampledLoads == rhs.subsampledLoads )
|
|
&& ( subsampledCoarseReconstructionEarlyAccess == rhs.subsampledCoarseReconstructionEarlyAccess )
|
|
&& ( maxSubsampledArrayLayers == rhs.maxSubsampledArrayLayers )
|
|
&& ( maxDescriptorSetSubsampledSamplers == rhs.maxDescriptorSetSubsampledSamplers );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess = {};
|
|
uint32_t maxSubsampledArrayLayers = {};
|
|
uint32_t maxDescriptorSetSubsampledSamplers = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT>::value, "PhysicalDeviceFragmentDensityMap2PropertiesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT>
|
|
{
|
|
using Type = PhysicalDeviceFragmentDensityMap2PropertiesEXT;
|
|
};
|
|
|
|
struct PhysicalDeviceFragmentDensityMapFeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceFragmentDensityMapFeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: fragmentDensityMap( fragmentDensityMap_ ), fragmentDensityMapDynamic( fragmentDensityMapDynamic_ ), fragmentDensityMapNonSubsampledImages( fragmentDensityMapNonSubsampledImages_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceFragmentDensityMapFeaturesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMapFeaturesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMap( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fragmentDensityMap = fragmentDensityMap_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMapDynamic( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fragmentDensityMapDynamic = fragmentDensityMapDynamic_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMapNonSubsampledImages( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fragmentDensityMapNonSubsampledImages = fragmentDensityMapNonSubsampledImages_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapFeaturesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapFeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, fragmentDensityMap, fragmentDensityMapDynamic, fragmentDensityMapNonSubsampledImages );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceFragmentDensityMapFeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( fragmentDensityMap == rhs.fragmentDensityMap )
|
|
&& ( fragmentDensityMapDynamic == rhs.fragmentDensityMapDynamic )
|
|
&& ( fragmentDensityMapNonSubsampledImages == rhs.fragmentDensityMapNonSubsampledImages );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapFeaturesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT>::value, "PhysicalDeviceFragmentDensityMapFeaturesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT>
|
|
{
|
|
using Type = PhysicalDeviceFragmentDensityMapFeaturesEXT;
|
|
};
|
|
|
|
struct PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM
|
|
{
|
|
using NativeType = VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM(VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: fragmentDensityMapOffset( fragmentDensityMapOffset_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( *reinterpret_cast<PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & operator=( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & operator=( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & setFragmentDensityMapOffset( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fragmentDensityMapOffset = fragmentDensityMapOffset_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, fragmentDensityMapOffset );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( fragmentDensityMapOffset == rhs.fragmentDensityMapOffset );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM ) == sizeof( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM>::value, "PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM>
|
|
{
|
|
using Type = PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM;
|
|
};
|
|
|
|
struct PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM
|
|
{
|
|
using NativeType = VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM(VULKAN_HPP_NAMESPACE::Extent2D fragmentDensityOffsetGranularity_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: fragmentDensityOffsetGranularity( fragmentDensityOffsetGranularity_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM( *reinterpret_cast<PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM & operator=( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM & operator=( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, fragmentDensityOffsetGranularity );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( fragmentDensityOffsetGranularity == rhs.fragmentDensityOffsetGranularity );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D fragmentDensityOffsetGranularity = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM ) == sizeof( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM>::value, "PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM>
|
|
{
|
|
using Type = PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM;
|
|
};
|
|
|
|
  struct PhysicalDeviceFragmentDensityMapPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceFragmentDensityMapPropertiesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT(VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations_ = {}) VULKAN_HPP_NOEXCEPT
      : minFragmentDensityTexelSize( minFragmentDensityTexelSize_ ), maxFragmentDensityTexelSize( maxFragmentDensityTexelSize_ ), fragmentDensityInvocations( fragmentDensityInvocations_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentDensityMapPropertiesEXT( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentDensityMapPropertiesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMapPropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT const *>( &rhs );
      return *this;
    }

    explicit operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapPropertiesEXT*>( this );
    }

    explicit operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapPropertiesEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, minFragmentDensityTexelSize, maxFragmentDensityTexelSize, fragmentDensityInvocations );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceFragmentDensityMapPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( minFragmentDensityTexelSize == rhs.minFragmentDensityTexelSize )
          && ( maxFragmentDensityTexelSize == rhs.maxFragmentDensityTexelSize )
          && ( fragmentDensityInvocations == rhs.fragmentDensityInvocations );
#endif
    }

    bool operator!=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize = {};
    VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapPropertiesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT>::value, "PhysicalDeviceFragmentDensityMapPropertiesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT>
  {
    using Type = PhysicalDeviceFragmentDensityMapPropertiesEXT;
  };

  struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV
  {
    using NativeType = VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ = {}) VULKAN_HPP_NOEXCEPT
      : fragmentShaderBarycentric( fragmentShaderBarycentric_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentShaderBarycentricFeaturesNV( *reinterpret_cast<PhysicalDeviceFragmentShaderBarycentricFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceFragmentShaderBarycentricFeaturesNV & operator=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShaderBarycentricFeaturesNV & operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesNV & setFragmentShaderBarycentric( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentShaderBarycentric = fragmentShaderBarycentric_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV*>( this );
    }

    explicit operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, fragmentShaderBarycentric );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric );
#endif
    }

    bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) == sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV>::value, "PhysicalDeviceFragmentShaderBarycentricFeaturesNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV>
  {
    using Type = PhysicalDeviceFragmentShaderBarycentricFeaturesNV;
  };

  struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {}) VULKAN_HPP_NOEXCEPT
      : fragmentShaderSampleInterlock( fragmentShaderSampleInterlock_ ), fragmentShaderPixelInterlock( fragmentShaderPixelInterlock_ ), fragmentShaderShadingRateInterlock( fragmentShaderShadingRateInterlock_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentShaderInterlockFeaturesEXT( *reinterpret_cast<PhysicalDeviceFragmentShaderInterlockFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderSampleInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentShaderSampleInterlock = fragmentShaderSampleInterlock_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderPixelInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentShaderPixelInterlock = fragmentShaderPixelInterlock_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderShadingRateInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentShaderShadingRateInterlock = fragmentShaderShadingRateInterlock_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT*>( this );
    }

    explicit operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, fragmentShaderSampleInterlock, fragmentShaderPixelInterlock, fragmentShaderShadingRateInterlock );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( fragmentShaderSampleInterlock == rhs.fragmentShaderSampleInterlock )
          && ( fragmentShaderPixelInterlock == rhs.fragmentShaderPixelInterlock )
          && ( fragmentShaderShadingRateInterlock == rhs.fragmentShaderShadingRateInterlock );
#endif
    }

    bool operator!=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT>::value, "PhysicalDeviceFragmentShaderInterlockFeaturesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT>
  {
    using Type = PhysicalDeviceFragmentShaderInterlockFeaturesEXT;
  };

  struct PhysicalDeviceFragmentShadingRateEnumsFeaturesNV
  {
    using NativeType = VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ = {}, VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ = {}, VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ = {}) VULKAN_HPP_NOEXCEPT
      : fragmentShadingRateEnums( fragmentShadingRateEnums_ ), supersampleFragmentShadingRates( supersampleFragmentShadingRates_ ), noInvocationFragmentShadingRates( noInvocationFragmentShadingRates_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( *reinterpret_cast<PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & operator=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & operator=( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setFragmentShadingRateEnums( VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentShadingRateEnums = fragmentShadingRateEnums_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setSupersampleFragmentShadingRates( VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT
    {
      supersampleFragmentShadingRates = supersampleFragmentShadingRates_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setNoInvocationFragmentShadingRates( VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT
    {
      noInvocationFragmentShadingRates = noInvocationFragmentShadingRates_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV*>( this );
    }

    explicit operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, fragmentShadingRateEnums, supersampleFragmentShadingRates, noInvocationFragmentShadingRates );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( fragmentShadingRateEnums == rhs.fragmentShadingRateEnums )
          && ( supersampleFragmentShadingRates == rhs.supersampleFragmentShadingRates )
          && ( noInvocationFragmentShadingRates == rhs.noInvocationFragmentShadingRates );
#endif
    }

    bool operator!=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums = {};
    VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates = {};
    VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV ) == sizeof( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV>::value, "PhysicalDeviceFragmentShadingRateEnumsFeaturesNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV>
  {
    using Type = PhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
  };

  struct PhysicalDeviceFragmentShadingRateEnumsPropertiesNV
  {
    using NativeType = VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV(VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1) VULKAN_HPP_NOEXCEPT
      : maxFragmentShadingRateInvocationCount( maxFragmentShadingRateInvocationCount_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( *reinterpret_cast<PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & operator=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & operator=( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & setMaxFragmentShadingRateInvocationCount( VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxFragmentShadingRateInvocationCount = maxFragmentShadingRateInvocationCount_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV*>( this );
    }

    explicit operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxFragmentShadingRateInvocationCount );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( maxFragmentShadingRateInvocationCount == rhs.maxFragmentShadingRateInvocationCount );
#endif
    }

    bool operator!=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV ) == sizeof( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV>::value, "PhysicalDeviceFragmentShadingRateEnumsPropertiesNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV>
  {
    using Type = PhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
  };

  struct PhysicalDeviceFragmentShadingRateFeaturesKHR
  {
    using NativeType = VkPhysicalDeviceFragmentShadingRateFeaturesKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ = {}, VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ = {}) VULKAN_HPP_NOEXCEPT
      : pipelineFragmentShadingRate( pipelineFragmentShadingRate_ ), primitiveFragmentShadingRate( primitiveFragmentShadingRate_ ), attachmentFragmentShadingRate( attachmentFragmentShadingRate_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShadingRateFeaturesKHR( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentShadingRateFeaturesKHR( *reinterpret_cast<PhysicalDeviceFragmentShadingRateFeaturesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceFragmentShadingRateFeaturesKHR & operator=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShadingRateFeaturesKHR & operator=( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setPipelineFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineFragmentShadingRate = pipelineFragmentShadingRate_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setPrimitiveFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
    {
      primitiveFragmentShadingRate = primitiveFragmentShadingRate_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setAttachmentFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentFragmentShadingRate = attachmentFragmentShadingRate_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateFeaturesKHR*>( this );
    }

    explicit operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateFeaturesKHR*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pipelineFragmentShadingRate, primitiveFragmentShadingRate, attachmentFragmentShadingRate );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceFragmentShadingRateFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( pipelineFragmentShadingRate == rhs.pipelineFragmentShadingRate )
          && ( primitiveFragmentShadingRate == rhs.primitiveFragmentShadingRate )
          && ( attachmentFragmentShadingRate == rhs.attachmentFragmentShadingRate );
#endif
    }

    bool operator!=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate = {};
    VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate = {};
    VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR ) == sizeof( VkPhysicalDeviceFragmentShadingRateFeaturesKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR>::value, "PhysicalDeviceFragmentShadingRateFeaturesKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR>
  {
    using Type = PhysicalDeviceFragmentShadingRateFeaturesKHR;
  };

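  // Usage sketch (illustrative only, not part of the generated registry output): a minimal way to
  // query these features is to chain the struct behind PhysicalDeviceFeatures2, assuming a Vulkan
  // 1.1+ instance (or VK_KHR_get_physical_device_properties2) and the usual "vk" namespace alias;
  // the variable name "physicalDevice" is hypothetical.
  //
  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                            vk::PhysicalDeviceFragmentShadingRateFeaturesKHR>();
  //   vk::PhysicalDeviceFragmentShadingRateFeaturesKHR const & fsrFeatures =
  //     chain.get<vk::PhysicalDeviceFragmentShadingRateFeaturesKHR>();
  //   bool hasPipelineRate = ( fsrFeatures.pipelineFragmentShadingRate == VK_TRUE );
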
  struct PhysicalDeviceFragmentShadingRateKHR
  {
    using NativeType = VkPhysicalDeviceFragmentShadingRateKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR(VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {}) VULKAN_HPP_NOEXCEPT
      : sampleCounts( sampleCounts_ ), fragmentSize( fragmentSize_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShadingRateKHR( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentShadingRateKHR( *reinterpret_cast<PhysicalDeviceFragmentShadingRateKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceFragmentShadingRateKHR & operator=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShadingRateKHR & operator=( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR const *>( &rhs );
      return *this;
    }

    explicit operator VkPhysicalDeviceFragmentShadingRateKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateKHR*>( this );
    }

    explicit operator VkPhysicalDeviceFragmentShadingRateKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, sampleCounts, fragmentSize );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceFragmentShadingRateKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( sampleCounts == rhs.sampleCounts )
          && ( fragmentSize == rhs.fragmentSize );
#endif
    }

    bool operator!=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateKHR;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {};
    VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR ) == sizeof( VkPhysicalDeviceFragmentShadingRateKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>::value, "PhysicalDeviceFragmentShadingRateKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateKHR>
  {
    using Type = PhysicalDeviceFragmentShadingRateKHR;
  };

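  // Usage sketch (illustrative only): instances of this struct are typically obtained by enumerating
  // the shading rates a device supports.  This assumes VK_KHR_fragment_shading_rate is available and
  // its entry points are loaded (for example via a dynamic dispatcher); "physicalDevice" is a
  // hypothetical vk::PhysicalDevice and exceptions are assumed to be enabled.
  //
  //   std::vector<vk::PhysicalDeviceFragmentShadingRateKHR> rates =
  //     physicalDevice.getFragmentShadingRatesKHR();
  //   for ( auto const & rate : rates )
  //   {
  //     // rate.fragmentSize is the shading rate extent; rate.sampleCounts lists the supported sample counts
  //   }
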
  struct PhysicalDeviceFragmentShadingRatePropertiesKHR
  {
    using NativeType = VkPhysicalDeviceFragmentShadingRatePropertiesKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR(VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize_ = {}, uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports_ = {}, VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateNonTrivialCombinerOps_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize_ = {}, uint32_t maxFragmentSizeAspectRatio_ = {}, uint32_t maxFragmentShadingRateCoverageSamples_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner_ = {}) VULKAN_HPP_NOEXCEPT
      : minFragmentShadingRateAttachmentTexelSize( minFragmentShadingRateAttachmentTexelSize_ ), maxFragmentShadingRateAttachmentTexelSize( maxFragmentShadingRateAttachmentTexelSize_ ), maxFragmentShadingRateAttachmentTexelSizeAspectRatio( maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ ), primitiveFragmentShadingRateWithMultipleViewports( primitiveFragmentShadingRateWithMultipleViewports_ ), layeredShadingRateAttachments( layeredShadingRateAttachments_ ), fragmentShadingRateNonTrivialCombinerOps( fragmentShadingRateNonTrivialCombinerOps_ ), maxFragmentSize( maxFragmentSize_ ), maxFragmentSizeAspectRatio( maxFragmentSizeAspectRatio_ ), maxFragmentShadingRateCoverageSamples( maxFragmentShadingRateCoverageSamples_ ), maxFragmentShadingRateRasterizationSamples( maxFragmentShadingRateRasterizationSamples_ ), fragmentShadingRateWithShaderDepthStencilWrites( fragmentShadingRateWithShaderDepthStencilWrites_ ), fragmentShadingRateWithSampleMask( fragmentShadingRateWithSampleMask_ ), fragmentShadingRateWithShaderSampleMask( fragmentShadingRateWithShaderSampleMask_ ), fragmentShadingRateWithConservativeRasterization( fragmentShadingRateWithConservativeRasterization_ ), fragmentShadingRateWithFragmentShaderInterlock( fragmentShadingRateWithFragmentShaderInterlock_ ), fragmentShadingRateWithCustomSampleLocations( fragmentShadingRateWithCustomSampleLocations_ ), fragmentShadingRateStrictMultiplyCombiner( fragmentShadingRateStrictMultiplyCombiner_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShadingRatePropertiesKHR( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentShadingRatePropertiesKHR( *reinterpret_cast<PhysicalDeviceFragmentShadingRatePropertiesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceFragmentShadingRatePropertiesKHR & operator=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShadingRatePropertiesKHR & operator=( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR const *>( &rhs );
      return *this;
    }

    explicit operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRatePropertiesKHR*>( this );
    }

    explicit operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRatePropertiesKHR*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, minFragmentShadingRateAttachmentTexelSize, maxFragmentShadingRateAttachmentTexelSize, maxFragmentShadingRateAttachmentTexelSizeAspectRatio, primitiveFragmentShadingRateWithMultipleViewports, layeredShadingRateAttachments, fragmentShadingRateNonTrivialCombinerOps, maxFragmentSize, maxFragmentSizeAspectRatio, maxFragmentShadingRateCoverageSamples, maxFragmentShadingRateRasterizationSamples, fragmentShadingRateWithShaderDepthStencilWrites, fragmentShadingRateWithSampleMask, fragmentShadingRateWithShaderSampleMask, fragmentShadingRateWithConservativeRasterization, fragmentShadingRateWithFragmentShaderInterlock, fragmentShadingRateWithCustomSampleLocations, fragmentShadingRateStrictMultiplyCombiner );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceFragmentShadingRatePropertiesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( minFragmentShadingRateAttachmentTexelSize == rhs.minFragmentShadingRateAttachmentTexelSize )
          && ( maxFragmentShadingRateAttachmentTexelSize == rhs.maxFragmentShadingRateAttachmentTexelSize )
          && ( maxFragmentShadingRateAttachmentTexelSizeAspectRatio == rhs.maxFragmentShadingRateAttachmentTexelSizeAspectRatio )
          && ( primitiveFragmentShadingRateWithMultipleViewports == rhs.primitiveFragmentShadingRateWithMultipleViewports )
          && ( layeredShadingRateAttachments == rhs.layeredShadingRateAttachments )
          && ( fragmentShadingRateNonTrivialCombinerOps == rhs.fragmentShadingRateNonTrivialCombinerOps )
          && ( maxFragmentSize == rhs.maxFragmentSize )
          && ( maxFragmentSizeAspectRatio == rhs.maxFragmentSizeAspectRatio )
          && ( maxFragmentShadingRateCoverageSamples == rhs.maxFragmentShadingRateCoverageSamples )
          && ( maxFragmentShadingRateRasterizationSamples == rhs.maxFragmentShadingRateRasterizationSamples )
          && ( fragmentShadingRateWithShaderDepthStencilWrites == rhs.fragmentShadingRateWithShaderDepthStencilWrites )
          && ( fragmentShadingRateWithSampleMask == rhs.fragmentShadingRateWithSampleMask )
          && ( fragmentShadingRateWithShaderSampleMask == rhs.fragmentShadingRateWithShaderSampleMask )
          && ( fragmentShadingRateWithConservativeRasterization == rhs.fragmentShadingRateWithConservativeRasterization )
          && ( fragmentShadingRateWithFragmentShaderInterlock == rhs.fragmentShadingRateWithFragmentShaderInterlock )
          && ( fragmentShadingRateWithCustomSampleLocations == rhs.fragmentShadingRateWithCustomSampleLocations )
          && ( fragmentShadingRateStrictMultiplyCombiner == rhs.fragmentShadingRateStrictMultiplyCombiner );
#endif
    }

    bool operator!=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize = {};
    VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize = {};
    uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio = {};
    VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports = {};
    VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateNonTrivialCombinerOps = {};
    VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize = {};
    uint32_t maxFragmentSizeAspectRatio = {};
    uint32_t maxFragmentShadingRateCoverageSamples = {};
    VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR ) == sizeof( VkPhysicalDeviceFragmentShadingRatePropertiesKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR>::value, "PhysicalDeviceFragmentShadingRatePropertiesKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR>
  {
    using Type = PhysicalDeviceFragmentShadingRatePropertiesKHR;
  };

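  // Usage sketch (illustrative only): these limits are read by chaining the struct behind
  // PhysicalDeviceProperties2 (Vulkan 1.1+ or VK_KHR_get_physical_device_properties2);
  // "physicalDevice" is a hypothetical vk::PhysicalDevice.
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceFragmentShadingRatePropertiesKHR>();
  //   vk::Extent2D maxSize = chain.get<vk::PhysicalDeviceFragmentShadingRatePropertiesKHR>().maxFragmentSize;
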
  struct PhysicalDeviceGlobalPriorityQueryFeaturesKHR
  {
    using NativeType = VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ = {}) VULKAN_HPP_NOEXCEPT
      : globalPriorityQuery( globalPriorityQuery_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeaturesKHR( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceGlobalPriorityQueryFeaturesKHR( VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceGlobalPriorityQueryFeaturesKHR( *reinterpret_cast<PhysicalDeviceGlobalPriorityQueryFeaturesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceGlobalPriorityQueryFeaturesKHR & operator=( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceGlobalPriorityQueryFeaturesKHR & operator=( VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeaturesKHR & setGlobalPriorityQuery( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ ) VULKAN_HPP_NOEXCEPT
    {
      globalPriorityQuery = globalPriorityQuery_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR*>( this );
    }

    explicit operator VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, globalPriorityQuery );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( globalPriorityQuery == rhs.globalPriorityQuery );
#endif
    }

    bool operator!=( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR ) == sizeof( VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR>::value, "PhysicalDeviceGlobalPriorityQueryFeaturesKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR>
  {
    using Type = PhysicalDeviceGlobalPriorityQueryFeaturesKHR;
  };
  using PhysicalDeviceGlobalPriorityQueryFeaturesEXT = PhysicalDeviceGlobalPriorityQueryFeaturesKHR;

  struct PhysicalDeviceGroupProperties
  {
    using NativeType = VkPhysicalDeviceGroupProperties;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGroupProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties(uint32_t physicalDeviceCount_ = {}, std::array<VULKAN_HPP_NAMESPACE::PhysicalDevice,VK_MAX_DEVICE_GROUP_SIZE> const & physicalDevices_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {}) VULKAN_HPP_NOEXCEPT
      : physicalDeviceCount( physicalDeviceCount_ ), physicalDevices( physicalDevices_ ), subsetAllocation( subsetAllocation_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceGroupProperties( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceGroupProperties( *reinterpret_cast<PhysicalDeviceGroupProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceGroupProperties & operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceGroupProperties & operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties const *>( &rhs );
      return *this;
    }

    explicit operator VkPhysicalDeviceGroupProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceGroupProperties*>( this );
    }

    explicit operator VkPhysicalDeviceGroupProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceGroupProperties*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::PhysicalDevice, VK_MAX_DEVICE_GROUP_SIZE> const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, physicalDeviceCount, physicalDevices, subsetAllocation );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceGroupProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( physicalDeviceCount == rhs.physicalDeviceCount )
          && ( physicalDevices == rhs.physicalDevices )
          && ( subsetAllocation == rhs.subsetAllocation );
#endif
    }

    bool operator!=( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGroupProperties;
    void * pNext = {};
    uint32_t physicalDeviceCount = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::PhysicalDevice, VK_MAX_DEVICE_GROUP_SIZE> physicalDevices = {};
    VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties ) == sizeof( VkPhysicalDeviceGroupProperties ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>::value, "PhysicalDeviceGroupProperties is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceGroupProperties>
  {
    using Type = PhysicalDeviceGroupProperties;
  };
  using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties;

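  // Usage sketch (illustrative only): PhysicalDeviceGroupProperties is normally obtained by
  // enumerating device groups from the instance (Vulkan 1.1+, or VK_KHR_device_group_creation);
  // "instance" is a hypothetical vk::Instance and exceptions are assumed to be enabled.
  //
  //   std::vector<vk::PhysicalDeviceGroupProperties> groups = instance.enumeratePhysicalDeviceGroups();
  //   for ( auto const & group : groups )
  //   {
  //     // the first group.physicalDeviceCount entries of group.physicalDevices form one device group
  //   }
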
struct PhysicalDeviceHostQueryResetFeatures
|
|
{
|
|
using NativeType = VkPhysicalDeviceHostQueryResetFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostQueryResetFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures(VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}) VULKAN_HPP_NOEXCEPT
      : hostQueryReset( hostQueryReset_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceHostQueryResetFeatures( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceHostQueryResetFeatures( *reinterpret_cast<PhysicalDeviceHostQueryResetFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceHostQueryResetFeatures & operator=( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceHostQueryResetFeatures & operator=( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
    {
      hostQueryReset = hostQueryReset_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceHostQueryResetFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceHostQueryResetFeatures*>( this );
    }

    explicit operator VkPhysicalDeviceHostQueryResetFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceHostQueryResetFeatures*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, hostQueryReset );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceHostQueryResetFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( hostQueryReset == rhs.hostQueryReset );
#endif
    }

    bool operator!=( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostQueryResetFeatures;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures ) == sizeof( VkPhysicalDeviceHostQueryResetFeatures ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures>::value, "PhysicalDeviceHostQueryResetFeatures is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceHostQueryResetFeatures>
  {
    using Type = PhysicalDeviceHostQueryResetFeatures;
  };
using PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures;
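
  // --- illustrative usage sketch (not generated code) ---
  // A minimal, hedged example of how PhysicalDeviceHostQueryResetFeatures is typically
  // consumed: query it through a StructureChain (the getFeatures2 overloads are declared
  // elsewhere in vulkan.hpp when enhanced mode is available), chain it into
  // DeviceCreateInfo::pNext to enable the feature, and then reset query pools from the
  // host.  The handles `physicalDevice`, `device` and `queryPool` are hypothetical.
  //
  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                            vk::PhysicalDeviceHostQueryResetFeatures>();
  //   if ( chain.get<vk::PhysicalDeviceHostQueryResetFeatures>().hostQueryReset )
  //   {
  //     vk::PhysicalDeviceHostQueryResetFeatures enableHostReset( VK_TRUE );
  //     vk::DeviceCreateInfo                     deviceCreateInfo;
  //     deviceCreateInfo.setPNext( &enableHostReset );   // request the feature at device creation
  //     // ... once `device` and `queryPool` exist, queries can be reset without a command buffer:
  //     device.resetQueryPool( queryPool, 0 /*firstQuery*/, 8 /*queryCount*/ );
  //   }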

  struct PhysicalDeviceIDProperties
  {
    using NativeType = VkPhysicalDeviceIDProperties;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIdProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties(std::array<uint8_t,VK_UUID_SIZE> const & deviceUUID_ = {}, std::array<uint8_t,VK_UUID_SIZE> const & driverUUID_ = {}, std::array<uint8_t,VK_LUID_SIZE> const & deviceLUID_ = {}, uint32_t deviceNodeMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}) VULKAN_HPP_NOEXCEPT
      : deviceUUID( deviceUUID_ ), driverUUID( driverUUID_ ), deviceLUID( deviceLUID_ ), deviceNodeMask( deviceNodeMask_ ), deviceLUIDValid( deviceLUIDValid_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceIDProperties( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceIDProperties( *reinterpret_cast<PhysicalDeviceIDProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceIDProperties & operator=( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceIDProperties & operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties const *>( &rhs );
      return *this;
    }

    explicit operator VkPhysicalDeviceIDProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceIDProperties*>( this );
    }

    explicit operator VkPhysicalDeviceIDProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceIDProperties*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, deviceUUID, driverUUID, deviceLUID, deviceNodeMask, deviceLUIDValid );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceIDProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( deviceUUID == rhs.deviceUUID )
          && ( driverUUID == rhs.driverUUID )
          && ( deviceLUID == rhs.deviceLUID )
          && ( deviceNodeMask == rhs.deviceNodeMask )
          && ( deviceLUIDValid == rhs.deviceLUIDValid );
#endif
    }

    bool operator!=( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIdProperties;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> deviceUUID = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> driverUUID = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> deviceLUID = {};
    uint32_t deviceNodeMask = {};
    VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties ) == sizeof( VkPhysicalDeviceIDProperties ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties>::value, "PhysicalDeviceIDProperties is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceIdProperties>
  {
    using Type = PhysicalDeviceIDProperties;
  };
using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties;
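
  // --- illustrative usage sketch (not generated code) ---
  // PhysicalDeviceIDProperties is an output-only structure, so it has no setters; it is
  // filled by chaining it behind PhysicalDeviceProperties2.  A hedged sketch, assuming the
  // enhanced-mode getProperties2 StructureChain overload declared elsewhere in vulkan.hpp
  // and a hypothetical `physicalDevice` handle:
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceIDProperties>();
  //   vk::PhysicalDeviceIDProperties const & id = chain.get<vk::PhysicalDeviceIDProperties>();
  //   // deviceUUID is an ArrayWrapper1D<uint8_t, VK_UUID_SIZE>, which behaves like std::array:
  //   std::array<uint8_t, VK_UUID_SIZE> uuid;
  //   std::copy( id.deviceUUID.begin(), id.deviceUUID.end(), uuid.begin() );
  //   bool luidUsable = ( id.deviceLUIDValid == VK_TRUE );   // deviceLUID is only meaningful if this is set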

  struct PhysicalDeviceImageDrmFormatModifierInfoEXT
  {
    using NativeType = VkPhysicalDeviceImageDrmFormatModifierInfoEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT(uint64_t drmFormatModifier_ = {}, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t * pQueueFamilyIndices_ = {}) VULKAN_HPP_NOEXCEPT
      : drmFormatModifier( drmFormatModifier_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( queueFamilyIndexCount_ ), pQueueFamilyIndices( pQueueFamilyIndices_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceImageDrmFormatModifierInfoEXT( *reinterpret_cast<PhysicalDeviceImageDrmFormatModifierInfoEXT const *>( &rhs ) )
    {}

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ )
      : drmFormatModifier( drmFormatModifier_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() )
    {}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
    {
      drmFormatModifier = drmFormatModifier_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
    {
      sharingMode = sharingMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndexCount = queueFamilyIndexCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pQueueFamilyIndices = pQueueFamilyIndices_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
      pQueueFamilyIndices = queueFamilyIndices_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceImageDrmFormatModifierInfoEXT*>( this );
    }

    explicit operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceImageDrmFormatModifierInfoEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &, VULKAN_HPP_NAMESPACE::SharingMode const &, uint32_t const &, const uint32_t * const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, drmFormatModifier, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceImageDrmFormatModifierInfoEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( drmFormatModifier == rhs.drmFormatModifier )
          && ( sharingMode == rhs.sharingMode )
          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
#endif
    }

    bool operator!=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;
    const void * pNext = {};
    uint64_t drmFormatModifier = {};
    VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
    uint32_t queueFamilyIndexCount = {};
    const uint32_t * pQueueFamilyIndices = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT ) == sizeof( VkPhysicalDeviceImageDrmFormatModifierInfoEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT>::value, "PhysicalDeviceImageDrmFormatModifierInfoEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT>
  {
    using Type = PhysicalDeviceImageDrmFormatModifierInfoEXT;
};
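
  // --- illustrative usage sketch (not generated code) ---
  // When enhanced mode is available, the ArrayProxy setter above fills both
  // queueFamilyIndexCount and pQueueFamilyIndices in one call, and the structure is then
  // chained behind PhysicalDeviceImageFormatInfo2 (with tiling set to eDrmFormatModifierEXT)
  // for a format-support query.  A hedged sketch; `modifier` and the queue family values
  // are hypothetical inputs:
  //
  //   std::array<uint32_t, 2> queueFamilies = { 0, 1 };
  //   vk::PhysicalDeviceImageDrmFormatModifierInfoEXT drmInfo;
  //   drmInfo.setDrmFormatModifier( modifier )              // e.g. taken from DrmFormatModifierPropertiesListEXT
  //          .setSharingMode( vk::SharingMode::eConcurrent )
  //          .setQueueFamilyIndices( queueFamilies );       // ArrayProxy setter: count + pointer together
  //   vk::PhysicalDeviceImageFormatInfo2 formatInfo;
  //   formatInfo.setTiling( vk::ImageTiling::eDrmFormatModifierEXT ).setPNext( &drmInfo );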

  struct PhysicalDeviceImageFormatInfo2
  {
    using NativeType = VkPhysicalDeviceImageFormatInfo2;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageFormatInfo2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
      : format( format_ ), type( type_ ), tiling( tiling_ ), usage( usage_ ), flags( flags_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceImageFormatInfo2( *reinterpret_cast<PhysicalDeviceImageFormatInfo2 const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceImageFormatInfo2 & operator=( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceImageFormatInfo2 & operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
    {
      tiling = tiling_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
    {
      usage = usage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( this );
    }

    explicit operator VkPhysicalDeviceImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceImageFormatInfo2*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::ImageType const &, VULKAN_HPP_NAMESPACE::ImageTiling const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &, VULKAN_HPP_NAMESPACE::ImageCreateFlags const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, format, type, tiling, usage, flags );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceImageFormatInfo2 const & ) const = default;
#else
    bool operator==( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( format == rhs.format )
          && ( type == rhs.type )
          && ( tiling == rhs.tiling )
          && ( usage == rhs.usage )
          && ( flags == rhs.flags );
#endif
    }

    bool operator!=( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D;
    VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
    VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2>::value, "PhysicalDeviceImageFormatInfo2 is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceImageFormatInfo2>
  {
    using Type = PhysicalDeviceImageFormatInfo2;
  };
using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2;
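
  // --- illustrative usage sketch (not generated code) ---
  // The setters above return *this, so an image-format query input can be built as a
  // chain and handed to PhysicalDevice::getImageFormatProperties2 (declared elsewhere in
  // vulkan.hpp; in enhanced mode it throws if the combination is not supported).  A hedged
  // sketch with a hypothetical `physicalDevice`:
  //
  //   vk::PhysicalDeviceImageFormatInfo2 imageFormatInfo;
  //   imageFormatInfo.setFormat( vk::Format::eR8G8B8A8Unorm )
  //                  .setType( vk::ImageType::e2D )
  //                  .setTiling( vk::ImageTiling::eOptimal )
  //                  .setUsage( vk::ImageUsageFlagBits::eSampled | vk::ImageUsageFlagBits::eTransferDst );
  //   vk::ImageFormatProperties2 supported = physicalDevice.getImageFormatProperties2( imageFormatInfo );
  //   uint32_t maxMipLevels = supported.imageFormatProperties.maxMipLevels;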

  struct PhysicalDeviceImageRobustnessFeatures
  {
    using NativeType = VkPhysicalDeviceImageRobustnessFeatures;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageRobustnessFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeatures(VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {}) VULKAN_HPP_NOEXCEPT
      : robustImageAccess( robustImageAccess_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeatures( PhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceImageRobustnessFeatures( VkPhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceImageRobustnessFeatures( *reinterpret_cast<PhysicalDeviceImageRobustnessFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceImageRobustnessFeatures & operator=( PhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceImageRobustnessFeatures & operator=( VkPhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeatures & setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      robustImageAccess = robustImageAccess_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceImageRobustnessFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceImageRobustnessFeatures*>( this );
    }

    explicit operator VkPhysicalDeviceImageRobustnessFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceImageRobustnessFeatures*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, robustImageAccess );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceImageRobustnessFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceImageRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( robustImageAccess == rhs.robustImageAccess );
#endif
    }

    bool operator!=( PhysicalDeviceImageRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageRobustnessFeatures;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures ) == sizeof( VkPhysicalDeviceImageRobustnessFeatures ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures>::value, "PhysicalDeviceImageRobustnessFeatures is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceImageRobustnessFeatures>
  {
    using Type = PhysicalDeviceImageRobustnessFeatures;
  };
using PhysicalDeviceImageRobustnessFeaturesEXT = PhysicalDeviceImageRobustnessFeatures;
|
|
|
|
struct PhysicalDeviceImageViewImageFormatInfoEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceImageViewImageFormatInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT(VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D) VULKAN_HPP_NOEXCEPT
|
|
: imageViewType( imageViewType_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceImageViewImageFormatInfoEXT( *reinterpret_cast<PhysicalDeviceImageViewImageFormatInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceImageViewImageFormatInfoEXT & operator=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceImageViewImageFormatInfoEXT & operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT & setImageViewType( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageViewType = imageViewType_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceImageViewImageFormatInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceImageViewImageFormatInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceImageViewImageFormatInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceImageViewImageFormatInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageViewType const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, imageViewType );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceImageViewImageFormatInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( imageViewType == rhs.imageViewType );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageViewType imageViewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT ) == sizeof( VkPhysicalDeviceImageViewImageFormatInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT>::value, "PhysicalDeviceImageViewImageFormatInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT>
|
|
{
|
|
using Type = PhysicalDeviceImageViewImageFormatInfoEXT;
|
|
};
|
|
|
|
struct PhysicalDeviceImageViewMinLodFeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceImageViewMinLodFeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewMinLodFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 minLod_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: minLod( minLod_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewMinLodFeaturesEXT( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceImageViewMinLodFeaturesEXT( VkPhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceImageViewMinLodFeaturesEXT( *reinterpret_cast<PhysicalDeviceImageViewMinLodFeaturesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceImageViewMinLodFeaturesEXT & operator=( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceImageViewMinLodFeaturesEXT & operator=( VkPhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewMinLodFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewMinLodFeaturesEXT & setMinLod( VULKAN_HPP_NAMESPACE::Bool32 minLod_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
minLod = minLod_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceImageViewMinLodFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceImageViewMinLodFeaturesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceImageViewMinLodFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceImageViewMinLodFeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, minLod );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceImageViewMinLodFeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( minLod == rhs.minLod );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 minLod = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT ) == sizeof( VkPhysicalDeviceImageViewMinLodFeaturesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT>::value, "PhysicalDeviceImageViewMinLodFeaturesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT>
|
|
{
|
|
using Type = PhysicalDeviceImageViewMinLodFeaturesEXT;
|
|
};
|
|
|
|
struct PhysicalDeviceImagelessFramebufferFeatures
|
|
{
|
|
using NativeType = VkPhysicalDeviceImagelessFramebufferFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures(VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: imagelessFramebuffer( imagelessFramebuffer_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceImagelessFramebufferFeatures( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceImagelessFramebufferFeatures( *reinterpret_cast<PhysicalDeviceImagelessFramebufferFeatures const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceImagelessFramebufferFeatures & operator=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceImagelessFramebufferFeatures & operator=( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures & setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imagelessFramebuffer = imagelessFramebuffer_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceImagelessFramebufferFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceImagelessFramebufferFeatures*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceImagelessFramebufferFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeatures*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, imagelessFramebuffer );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceImagelessFramebufferFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( imagelessFramebuffer == rhs.imagelessFramebuffer );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures ) == sizeof( VkPhysicalDeviceImagelessFramebufferFeatures ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures>::value, "PhysicalDeviceImagelessFramebufferFeatures is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceImagelessFramebufferFeatures>
|
|
{
|
|
using Type = PhysicalDeviceImagelessFramebufferFeatures;
|
|
};
|
|
using PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures;
|
|
|
|
struct PhysicalDeviceIndexTypeUint8FeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceIndexTypeUint8FeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: indexTypeUint8( indexTypeUint8_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceIndexTypeUint8FeaturesEXT( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceIndexTypeUint8FeaturesEXT( *reinterpret_cast<PhysicalDeviceIndexTypeUint8FeaturesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceIndexTypeUint8FeaturesEXT & operator=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceIndexTypeUint8FeaturesEXT & operator=( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8FeaturesEXT & setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
indexTypeUint8 = indexTypeUint8_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceIndexTypeUint8FeaturesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceIndexTypeUint8FeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, indexTypeUint8 );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceIndexTypeUint8FeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( indexTypeUint8 == rhs.indexTypeUint8 );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8 = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT ) == sizeof( VkPhysicalDeviceIndexTypeUint8FeaturesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT>::value, "PhysicalDeviceIndexTypeUint8FeaturesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT>
|
|
{
|
|
using Type = PhysicalDeviceIndexTypeUint8FeaturesEXT;
|
|
};
|
|
|
|
struct PhysicalDeviceInheritedViewportScissorFeaturesNV
|
|
{
|
|
using NativeType = VkPhysicalDeviceInheritedViewportScissorFeaturesNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceInheritedViewportScissorFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: inheritedViewportScissor2D( inheritedViewportScissor2D_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceInheritedViewportScissorFeaturesNV( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceInheritedViewportScissorFeaturesNV( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceInheritedViewportScissorFeaturesNV( *reinterpret_cast<PhysicalDeviceInheritedViewportScissorFeaturesNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceInheritedViewportScissorFeaturesNV & operator=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceInheritedViewportScissorFeaturesNV & operator=( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV & setInheritedViewportScissor2D( VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
inheritedViewportScissor2D = inheritedViewportScissor2D_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceInheritedViewportScissorFeaturesNV*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceInheritedViewportScissorFeaturesNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, inheritedViewportScissor2D );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceInheritedViewportScissorFeaturesNV const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( inheritedViewportScissor2D == rhs.inheritedViewportScissor2D );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV ) == sizeof( VkPhysicalDeviceInheritedViewportScissorFeaturesNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV>::value, "PhysicalDeviceInheritedViewportScissorFeaturesNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV>
|
|
{
|
|
using Type = PhysicalDeviceInheritedViewportScissorFeaturesNV;
|
|
};
|
|
|
|
struct PhysicalDeviceInlineUniformBlockFeatures
|
|
{
|
|
using NativeType = VkPhysicalDeviceInlineUniformBlockFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures(VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: inlineUniformBlock( inlineUniformBlock_ ), descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceInlineUniformBlockFeatures( VkPhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceInlineUniformBlockFeatures( *reinterpret_cast<PhysicalDeviceInlineUniformBlockFeatures const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceInlineUniformBlockFeatures & operator=( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceInlineUniformBlockFeatures & operator=( VkPhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
inlineUniformBlock = inlineUniformBlock_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & setDescriptorBindingInlineUniformBlockUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceInlineUniformBlockFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockFeatures*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceInlineUniformBlockFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockFeatures*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, inlineUniformBlock, descriptorBindingInlineUniformBlockUpdateAfterBind );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceInlineUniformBlockFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( inlineUniformBlock == rhs.inlineUniformBlock )
|
|
&& ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures ) == sizeof( VkPhysicalDeviceInlineUniformBlockFeatures ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures>::value, "PhysicalDeviceInlineUniformBlockFeatures is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceInlineUniformBlockFeatures>
|
|
{
|
|
using Type = PhysicalDeviceInlineUniformBlockFeatures;
|
|
};
|
|
using PhysicalDeviceInlineUniformBlockFeaturesEXT = PhysicalDeviceInlineUniformBlockFeatures;
|
|
|
|
struct PhysicalDeviceInlineUniformBlockProperties
|
|
{
|
|
using NativeType = VkPhysicalDeviceInlineUniformBlockProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties(uint32_t maxInlineUniformBlockSize_ = {}, uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {}, uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: maxInlineUniformBlockSize( maxInlineUniformBlockSize_ ), maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ ), maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ ), maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ ), maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties( PhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceInlineUniformBlockProperties( VkPhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceInlineUniformBlockProperties( *reinterpret_cast<PhysicalDeviceInlineUniformBlockProperties const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceInlineUniformBlockProperties & operator=( PhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceInlineUniformBlockProperties & operator=( VkPhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceInlineUniformBlockProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockProperties*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceInlineUniformBlockProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockProperties*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maxInlineUniformBlockSize, maxPerStageDescriptorInlineUniformBlocks, maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks, maxDescriptorSetInlineUniformBlocks, maxDescriptorSetUpdateAfterBindInlineUniformBlocks );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceInlineUniformBlockProperties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceInlineUniformBlockProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize )
|
|
&& ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks )
|
|
&& ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks )
|
|
&& ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks )
|
|
&& ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceInlineUniformBlockProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockProperties;
|
|
void * pNext = {};
|
|
uint32_t maxInlineUniformBlockSize = {};
|
|
uint32_t maxPerStageDescriptorInlineUniformBlocks = {};
|
|
uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {};
|
|
uint32_t maxDescriptorSetInlineUniformBlocks = {};
|
|
uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties ) == sizeof( VkPhysicalDeviceInlineUniformBlockProperties ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties>::value, "PhysicalDeviceInlineUniformBlockProperties is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceInlineUniformBlockProperties>
|
|
{
|
|
using Type = PhysicalDeviceInlineUniformBlockProperties;
|
|
};
|
|
using PhysicalDeviceInlineUniformBlockPropertiesEXT = PhysicalDeviceInlineUniformBlockProperties;
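
  // --- illustrative usage sketch (not generated code) ---
  // PhysicalDeviceInlineUniformBlockProperties is an output-only limits structure; a hedged
  // sketch of reading it through the getProperties2 StructureChain overload (declared
  // elsewhere in vulkan.hpp), with a hypothetical `physicalDevice` handle:
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceInlineUniformBlockProperties>();
  //   auto const & inlineProps = chain.get<vk::PhysicalDeviceInlineUniformBlockProperties>();
  //   // data written via WriteDescriptorSetInlineUniformBlock must stay within this limit:
  //   uint32_t maxInlineBytes = inlineProps.maxInlineUniformBlockSize;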
|
|
|
|
struct PhysicalDeviceInvocationMaskFeaturesHUAWEI
|
|
{
|
|
using NativeType = VkPhysicalDeviceInvocationMaskFeaturesHUAWEI;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceInvocationMaskFeaturesHUAWEI(VULKAN_HPP_NAMESPACE::Bool32 invocationMask_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: invocationMask( invocationMask_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceInvocationMaskFeaturesHUAWEI( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceInvocationMaskFeaturesHUAWEI( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceInvocationMaskFeaturesHUAWEI( *reinterpret_cast<PhysicalDeviceInvocationMaskFeaturesHUAWEI const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceInvocationMaskFeaturesHUAWEI & operator=( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceInvocationMaskFeaturesHUAWEI & operator=( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInvocationMaskFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInvocationMaskFeaturesHUAWEI & setInvocationMask( VULKAN_HPP_NAMESPACE::Bool32 invocationMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
invocationMask = invocationMask_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceInvocationMaskFeaturesHUAWEI*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceInvocationMaskFeaturesHUAWEI*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, invocationMask );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( invocationMask == rhs.invocationMask );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 invocationMask = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI ) == sizeof( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI>::value, "PhysicalDeviceInvocationMaskFeaturesHUAWEI is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI>
|
|
{
|
|
using Type = PhysicalDeviceInvocationMaskFeaturesHUAWEI;
|
|
};
|
|
|
|
struct PhysicalDeviceLimits
|
|
{
|
|
using NativeType = VkPhysicalDeviceLimits;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits(uint32_t maxImageDimension1D_ = {}, uint32_t maxImageDimension2D_ = {}, uint32_t maxImageDimension3D_ = {}, uint32_t maxImageDimensionCube_ = {}, uint32_t maxImageArrayLayers_ = {}, uint32_t maxTexelBufferElements_ = {}, uint32_t maxUniformBufferRange_ = {}, uint32_t maxStorageBufferRange_ = {}, uint32_t maxPushConstantsSize_ = {}, uint32_t maxMemoryAllocationCount_ = {}, uint32_t maxSamplerAllocationCount_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_ = {}, uint32_t maxBoundDescriptorSets_ = {}, uint32_t maxPerStageDescriptorSamplers_ = {}, uint32_t maxPerStageDescriptorUniformBuffers_ = {}, uint32_t maxPerStageDescriptorStorageBuffers_ = {}, uint32_t maxPerStageDescriptorSampledImages_ = {}, uint32_t maxPerStageDescriptorStorageImages_ = {}, uint32_t maxPerStageDescriptorInputAttachments_ = {}, uint32_t maxPerStageResources_ = {}, uint32_t maxDescriptorSetSamplers_ = {}, uint32_t maxDescriptorSetUniformBuffers_ = {}, uint32_t maxDescriptorSetUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetStorageBuffers_ = {}, uint32_t maxDescriptorSetStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetSampledImages_ = {}, uint32_t maxDescriptorSetStorageImages_ = {}, uint32_t maxDescriptorSetInputAttachments_ = {}, uint32_t maxVertexInputAttributes_ = {}, uint32_t maxVertexInputBindings_ = {}, uint32_t maxVertexInputAttributeOffset_ = {}, uint32_t maxVertexInputBindingStride_ = {}, uint32_t maxVertexOutputComponents_ = {}, uint32_t maxTessellationGenerationLevel_ = {}, uint32_t maxTessellationPatchSize_ = {}, uint32_t maxTessellationControlPerVertexInputComponents_ = {}, uint32_t maxTessellationControlPerVertexOutputComponents_ = {}, uint32_t maxTessellationControlPerPatchOutputComponents_ = {}, uint32_t maxTessellationControlTotalOutputComponents_ = {}, uint32_t maxTessellationEvaluationInputComponents_ = {}, uint32_t maxTessellationEvaluationOutputComponents_ = {}, uint32_t maxGeometryShaderInvocations_ = {}, uint32_t maxGeometryInputComponents_ = {}, uint32_t maxGeometryOutputComponents_ = {}, uint32_t maxGeometryOutputVertices_ = {}, uint32_t maxGeometryTotalOutputComponents_ = {}, uint32_t maxFragmentInputComponents_ = {}, uint32_t maxFragmentOutputAttachments_ = {}, uint32_t maxFragmentDualSrcAttachments_ = {}, uint32_t maxFragmentCombinedOutputResources_ = {}, uint32_t maxComputeSharedMemorySize_ = {}, std::array<uint32_t,3> const & maxComputeWorkGroupCount_ = {}, uint32_t maxComputeWorkGroupInvocations_ = {}, std::array<uint32_t,3> const & maxComputeWorkGroupSize_ = {}, uint32_t subPixelPrecisionBits_ = {}, uint32_t subTexelPrecisionBits_ = {}, uint32_t mipmapPrecisionBits_ = {}, uint32_t maxDrawIndexedIndexValue_ = {}, uint32_t maxDrawIndirectCount_ = {}, float maxSamplerLodBias_ = {}, float maxSamplerAnisotropy_ = {}, uint32_t maxViewports_ = {}, std::array<uint32_t,2> const & maxViewportDimensions_ = {}, std::array<float,2> const & viewportBoundsRange_ = {}, uint32_t viewportSubPixelBits_ = {}, size_t minMemoryMapAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_ = {}, int32_t minTexelOffset_ = {}, uint32_t maxTexelOffset_ = {}, int32_t minTexelGatherOffset_ = {}, uint32_t maxTexelGatherOffset_ = {}, float minInterpolationOffset_ = {}, float maxInterpolationOffset_ = {}, uint32_t 
subPixelInterpolationOffsetBits_ = {}, uint32_t maxFramebufferWidth_ = {}, uint32_t maxFramebufferHeight_ = {}, uint32_t maxFramebufferLayers_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts_ = {}, uint32_t maxColorAttachments_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_ = {}, uint32_t maxSampleMaskWords_ = {}, VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics_ = {}, float timestampPeriod_ = {}, uint32_t maxClipDistances_ = {}, uint32_t maxCullDistances_ = {}, uint32_t maxCombinedClipAndCullDistances_ = {}, uint32_t discreteQueuePriorities_ = {}, std::array<float,2> const & pointSizeRange_ = {}, std::array<float,2> const & lineWidthRange_ = {}, float pointSizeGranularity_ = {}, float lineWidthGranularity_ = {}, VULKAN_HPP_NAMESPACE::Bool32 strictLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: maxImageDimension1D( maxImageDimension1D_ ), maxImageDimension2D( maxImageDimension2D_ ), maxImageDimension3D( maxImageDimension3D_ ), maxImageDimensionCube( maxImageDimensionCube_ ), maxImageArrayLayers( maxImageArrayLayers_ ), maxTexelBufferElements( maxTexelBufferElements_ ), maxUniformBufferRange( maxUniformBufferRange_ ), maxStorageBufferRange( maxStorageBufferRange_ ), maxPushConstantsSize( maxPushConstantsSize_ ), maxMemoryAllocationCount( maxMemoryAllocationCount_ ), maxSamplerAllocationCount( maxSamplerAllocationCount_ ), bufferImageGranularity( bufferImageGranularity_ ), sparseAddressSpaceSize( sparseAddressSpaceSize_ ), maxBoundDescriptorSets( maxBoundDescriptorSets_ ), maxPerStageDescriptorSamplers( maxPerStageDescriptorSamplers_ ), maxPerStageDescriptorUniformBuffers( maxPerStageDescriptorUniformBuffers_ ), maxPerStageDescriptorStorageBuffers( maxPerStageDescriptorStorageBuffers_ ), maxPerStageDescriptorSampledImages( maxPerStageDescriptorSampledImages_ ), maxPerStageDescriptorStorageImages( maxPerStageDescriptorStorageImages_ ), maxPerStageDescriptorInputAttachments( maxPerStageDescriptorInputAttachments_ ), maxPerStageResources( maxPerStageResources_ ), maxDescriptorSetSamplers( maxDescriptorSetSamplers_ ), maxDescriptorSetUniformBuffers( maxDescriptorSetUniformBuffers_ ), maxDescriptorSetUniformBuffersDynamic( maxDescriptorSetUniformBuffersDynamic_ ), maxDescriptorSetStorageBuffers( maxDescriptorSetStorageBuffers_ ), maxDescriptorSetStorageBuffersDynamic( maxDescriptorSetStorageBuffersDynamic_ ), maxDescriptorSetSampledImages( maxDescriptorSetSampledImages_ ), maxDescriptorSetStorageImages( maxDescriptorSetStorageImages_ ), maxDescriptorSetInputAttachments( maxDescriptorSetInputAttachments_ ), maxVertexInputAttributes( maxVertexInputAttributes_ ), maxVertexInputBindings( maxVertexInputBindings_ ), maxVertexInputAttributeOffset( maxVertexInputAttributeOffset_ ), maxVertexInputBindingStride( maxVertexInputBindingStride_ ), maxVertexOutputComponents( maxVertexOutputComponents_ ), maxTessellationGenerationLevel( maxTessellationGenerationLevel_ ), maxTessellationPatchSize( maxTessellationPatchSize_ ), maxTessellationControlPerVertexInputComponents( maxTessellationControlPerVertexInputComponents_ ), maxTessellationControlPerVertexOutputComponents( maxTessellationControlPerVertexOutputComponents_ ), maxTessellationControlPerPatchOutputComponents( maxTessellationControlPerPatchOutputComponents_ ), maxTessellationControlTotalOutputComponents( maxTessellationControlTotalOutputComponents_ ), maxTessellationEvaluationInputComponents( maxTessellationEvaluationInputComponents_ ), maxTessellationEvaluationOutputComponents( maxTessellationEvaluationOutputComponents_ ), maxGeometryShaderInvocations( maxGeometryShaderInvocations_ ), maxGeometryInputComponents( maxGeometryInputComponents_ ), maxGeometryOutputComponents( maxGeometryOutputComponents_ ), maxGeometryOutputVertices( maxGeometryOutputVertices_ ), maxGeometryTotalOutputComponents( maxGeometryTotalOutputComponents_ ), maxFragmentInputComponents( maxFragmentInputComponents_ ), maxFragmentOutputAttachments( maxFragmentOutputAttachments_ ), maxFragmentDualSrcAttachments( maxFragmentDualSrcAttachments_ ), maxFragmentCombinedOutputResources( maxFragmentCombinedOutputResources_ ), maxComputeSharedMemorySize( maxComputeSharedMemorySize_ ), maxComputeWorkGroupCount( maxComputeWorkGroupCount_ ), maxComputeWorkGroupInvocations( maxComputeWorkGroupInvocations_ ), maxComputeWorkGroupSize( maxComputeWorkGroupSize_ ), subPixelPrecisionBits( 
subPixelPrecisionBits_ ), subTexelPrecisionBits( subTexelPrecisionBits_ ), mipmapPrecisionBits( mipmapPrecisionBits_ ), maxDrawIndexedIndexValue( maxDrawIndexedIndexValue_ ), maxDrawIndirectCount( maxDrawIndirectCount_ ), maxSamplerLodBias( maxSamplerLodBias_ ), maxSamplerAnisotropy( maxSamplerAnisotropy_ ), maxViewports( maxViewports_ ), maxViewportDimensions( maxViewportDimensions_ ), viewportBoundsRange( viewportBoundsRange_ ), viewportSubPixelBits( viewportSubPixelBits_ ), minMemoryMapAlignment( minMemoryMapAlignment_ ), minTexelBufferOffsetAlignment( minTexelBufferOffsetAlignment_ ), minUniformBufferOffsetAlignment( minUniformBufferOffsetAlignment_ ), minStorageBufferOffsetAlignment( minStorageBufferOffsetAlignment_ ), minTexelOffset( minTexelOffset_ ), maxTexelOffset( maxTexelOffset_ ), minTexelGatherOffset( minTexelGatherOffset_ ), maxTexelGatherOffset( maxTexelGatherOffset_ ), minInterpolationOffset( minInterpolationOffset_ ), maxInterpolationOffset( maxInterpolationOffset_ ), subPixelInterpolationOffsetBits( subPixelInterpolationOffsetBits_ ), maxFramebufferWidth( maxFramebufferWidth_ ), maxFramebufferHeight( maxFramebufferHeight_ ), maxFramebufferLayers( maxFramebufferLayers_ ), framebufferColorSampleCounts( framebufferColorSampleCounts_ ), framebufferDepthSampleCounts( framebufferDepthSampleCounts_ ), framebufferStencilSampleCounts( framebufferStencilSampleCounts_ ), framebufferNoAttachmentsSampleCounts( framebufferNoAttachmentsSampleCounts_ ), maxColorAttachments( maxColorAttachments_ ), sampledImageColorSampleCounts( sampledImageColorSampleCounts_ ), sampledImageIntegerSampleCounts( sampledImageIntegerSampleCounts_ ), sampledImageDepthSampleCounts( sampledImageDepthSampleCounts_ ), sampledImageStencilSampleCounts( sampledImageStencilSampleCounts_ ), storageImageSampleCounts( storageImageSampleCounts_ ), maxSampleMaskWords( maxSampleMaskWords_ ), timestampComputeAndGraphics( timestampComputeAndGraphics_ ), timestampPeriod( timestampPeriod_ ), maxClipDistances( maxClipDistances_ ), maxCullDistances( maxCullDistances_ ), maxCombinedClipAndCullDistances( maxCombinedClipAndCullDistances_ ), discreteQueuePriorities( discreteQueuePriorities_ ), pointSizeRange( pointSizeRange_ ), lineWidthRange( lineWidthRange_ ), pointSizeGranularity( pointSizeGranularity_ ), lineWidthGranularity( lineWidthGranularity_ ), strictLines( strictLines_ ), standardSampleLocations( standardSampleLocations_ ), optimalBufferCopyOffsetAlignment( optimalBufferCopyOffsetAlignment_ ), optimalBufferCopyRowPitchAlignment( optimalBufferCopyRowPitchAlignment_ ), nonCoherentAtomSize( nonCoherentAtomSize_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceLimits( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceLimits( *reinterpret_cast<PhysicalDeviceLimits const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceLimits & operator=( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceLimits & operator=( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceLimits const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceLimits*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceLimits*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, float const &, float const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 2> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> const &, uint32_t const &, size_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, int32_t const &, uint32_t const &, int32_t const &, uint32_t const &, float const &, float const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, float const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> const &, float const &, float const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( maxImageDimension1D, maxImageDimension2D, maxImageDimension3D, maxImageDimensionCube, maxImageArrayLayers, maxTexelBufferElements, maxUniformBufferRange, maxStorageBufferRange, maxPushConstantsSize, maxMemoryAllocationCount, maxSamplerAllocationCount, bufferImageGranularity, sparseAddressSpaceSize, maxBoundDescriptorSets, maxPerStageDescriptorSamplers, maxPerStageDescriptorUniformBuffers, maxPerStageDescriptorStorageBuffers, maxPerStageDescriptorSampledImages, maxPerStageDescriptorStorageImages, maxPerStageDescriptorInputAttachments, maxPerStageResources, maxDescriptorSetSamplers, maxDescriptorSetUniformBuffers, maxDescriptorSetUniformBuffersDynamic, maxDescriptorSetStorageBuffers, maxDescriptorSetStorageBuffersDynamic, maxDescriptorSetSampledImages, maxDescriptorSetStorageImages, maxDescriptorSetInputAttachments, maxVertexInputAttributes, maxVertexInputBindings, maxVertexInputAttributeOffset, maxVertexInputBindingStride, maxVertexOutputComponents, maxTessellationGenerationLevel, maxTessellationPatchSize, maxTessellationControlPerVertexInputComponents, maxTessellationControlPerVertexOutputComponents, maxTessellationControlPerPatchOutputComponents, maxTessellationControlTotalOutputComponents, maxTessellationEvaluationInputComponents, maxTessellationEvaluationOutputComponents, maxGeometryShaderInvocations, maxGeometryInputComponents, maxGeometryOutputComponents, maxGeometryOutputVertices, maxGeometryTotalOutputComponents, maxFragmentInputComponents, maxFragmentOutputAttachments, maxFragmentDualSrcAttachments, maxFragmentCombinedOutputResources, maxComputeSharedMemorySize, maxComputeWorkGroupCount, maxComputeWorkGroupInvocations, maxComputeWorkGroupSize, subPixelPrecisionBits, subTexelPrecisionBits, mipmapPrecisionBits, maxDrawIndexedIndexValue, maxDrawIndirectCount, maxSamplerLodBias, maxSamplerAnisotropy, maxViewports, maxViewportDimensions, viewportBoundsRange, viewportSubPixelBits, minMemoryMapAlignment, minTexelBufferOffsetAlignment, minUniformBufferOffsetAlignment, minStorageBufferOffsetAlignment, minTexelOffset, maxTexelOffset, minTexelGatherOffset, maxTexelGatherOffset, minInterpolationOffset, maxInterpolationOffset, subPixelInterpolationOffsetBits, maxFramebufferWidth, maxFramebufferHeight, maxFramebufferLayers, framebufferColorSampleCounts, framebufferDepthSampleCounts, framebufferStencilSampleCounts, framebufferNoAttachmentsSampleCounts, maxColorAttachments, sampledImageColorSampleCounts, sampledImageIntegerSampleCounts, sampledImageDepthSampleCounts, sampledImageStencilSampleCounts, storageImageSampleCounts, maxSampleMaskWords, timestampComputeAndGraphics, timestampPeriod, maxClipDistances, maxCullDistances, maxCombinedClipAndCullDistances, discreteQueuePriorities, pointSizeRange, lineWidthRange, pointSizeGranularity, lineWidthGranularity, strictLines, standardSampleLocations, optimalBufferCopyOffsetAlignment, optimalBufferCopyRowPitchAlignment, nonCoherentAtomSize );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceLimits const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( maxImageDimension1D == rhs.maxImageDimension1D )
|
|
&& ( maxImageDimension2D == rhs.maxImageDimension2D )
|
|
&& ( maxImageDimension3D == rhs.maxImageDimension3D )
|
|
&& ( maxImageDimensionCube == rhs.maxImageDimensionCube )
|
|
&& ( maxImageArrayLayers == rhs.maxImageArrayLayers )
|
|
&& ( maxTexelBufferElements == rhs.maxTexelBufferElements )
|
|
&& ( maxUniformBufferRange == rhs.maxUniformBufferRange )
|
|
&& ( maxStorageBufferRange == rhs.maxStorageBufferRange )
|
|
&& ( maxPushConstantsSize == rhs.maxPushConstantsSize )
|
|
&& ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount )
|
|
&& ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount )
|
|
&& ( bufferImageGranularity == rhs.bufferImageGranularity )
|
|
&& ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize )
|
|
&& ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets )
|
|
&& ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers )
|
|
&& ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers )
|
|
&& ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers )
|
|
&& ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages )
|
|
&& ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages )
|
|
&& ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments )
|
|
&& ( maxPerStageResources == rhs.maxPerStageResources )
|
|
&& ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers )
|
|
&& ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers )
|
|
&& ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic )
|
|
&& ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers )
|
|
&& ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic )
|
|
&& ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages )
|
|
&& ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages )
|
|
&& ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments )
|
|
&& ( maxVertexInputAttributes == rhs.maxVertexInputAttributes )
|
|
&& ( maxVertexInputBindings == rhs.maxVertexInputBindings )
|
|
&& ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset )
|
|
&& ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride )
|
|
&& ( maxVertexOutputComponents == rhs.maxVertexOutputComponents )
|
|
&& ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel )
|
|
&& ( maxTessellationPatchSize == rhs.maxTessellationPatchSize )
|
|
&& ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents )
|
|
&& ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents )
|
|
&& ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents )
|
|
&& ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents )
|
|
&& ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents )
|
|
&& ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents )
|
|
&& ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations )
|
|
&& ( maxGeometryInputComponents == rhs.maxGeometryInputComponents )
|
|
&& ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents )
|
|
&& ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices )
|
|
&& ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents )
|
|
&& ( maxFragmentInputComponents == rhs.maxFragmentInputComponents )
|
|
&& ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments )
|
|
&& ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments )
|
|
&& ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources )
|
|
&& ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize )
|
|
&& ( maxComputeWorkGroupCount == rhs.maxComputeWorkGroupCount )
|
|
&& ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations )
|
|
&& ( maxComputeWorkGroupSize == rhs.maxComputeWorkGroupSize )
|
|
&& ( subPixelPrecisionBits == rhs.subPixelPrecisionBits )
|
|
&& ( subTexelPrecisionBits == rhs.subTexelPrecisionBits )
|
|
&& ( mipmapPrecisionBits == rhs.mipmapPrecisionBits )
|
|
&& ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue )
|
|
&& ( maxDrawIndirectCount == rhs.maxDrawIndirectCount )
|
|
&& ( maxSamplerLodBias == rhs.maxSamplerLodBias )
|
|
&& ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy )
|
|
&& ( maxViewports == rhs.maxViewports )
|
|
&& ( maxViewportDimensions == rhs.maxViewportDimensions )
|
|
&& ( viewportBoundsRange == rhs.viewportBoundsRange )
|
|
&& ( viewportSubPixelBits == rhs.viewportSubPixelBits )
|
|
&& ( minMemoryMapAlignment == rhs.minMemoryMapAlignment )
|
|
&& ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment )
|
|
&& ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment )
|
|
&& ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment )
|
|
&& ( minTexelOffset == rhs.minTexelOffset )
|
|
&& ( maxTexelOffset == rhs.maxTexelOffset )
|
|
&& ( minTexelGatherOffset == rhs.minTexelGatherOffset )
|
|
&& ( maxTexelGatherOffset == rhs.maxTexelGatherOffset )
|
|
&& ( minInterpolationOffset == rhs.minInterpolationOffset )
|
|
&& ( maxInterpolationOffset == rhs.maxInterpolationOffset )
|
|
&& ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits )
|
|
&& ( maxFramebufferWidth == rhs.maxFramebufferWidth )
|
|
&& ( maxFramebufferHeight == rhs.maxFramebufferHeight )
|
|
&& ( maxFramebufferLayers == rhs.maxFramebufferLayers )
|
|
&& ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts )
|
|
&& ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts )
|
|
&& ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts )
|
|
&& ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts )
|
|
&& ( maxColorAttachments == rhs.maxColorAttachments )
|
|
&& ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts )
|
|
&& ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts )
|
|
&& ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts )
|
|
&& ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts )
|
|
&& ( storageImageSampleCounts == rhs.storageImageSampleCounts )
|
|
&& ( maxSampleMaskWords == rhs.maxSampleMaskWords )
|
|
&& ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics )
|
|
&& ( timestampPeriod == rhs.timestampPeriod )
|
|
&& ( maxClipDistances == rhs.maxClipDistances )
|
|
&& ( maxCullDistances == rhs.maxCullDistances )
|
|
&& ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances )
|
|
&& ( discreteQueuePriorities == rhs.discreteQueuePriorities )
|
|
&& ( pointSizeRange == rhs.pointSizeRange )
|
|
&& ( lineWidthRange == rhs.lineWidthRange )
|
|
&& ( pointSizeGranularity == rhs.pointSizeGranularity )
|
|
&& ( lineWidthGranularity == rhs.lineWidthGranularity )
|
|
&& ( strictLines == rhs.strictLines )
|
|
&& ( standardSampleLocations == rhs.standardSampleLocations )
|
|
&& ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment )
|
|
&& ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment )
|
|
&& ( nonCoherentAtomSize == rhs.nonCoherentAtomSize );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t maxImageDimension1D = {};
|
|
uint32_t maxImageDimension2D = {};
|
|
uint32_t maxImageDimension3D = {};
|
|
uint32_t maxImageDimensionCube = {};
|
|
uint32_t maxImageArrayLayers = {};
|
|
uint32_t maxTexelBufferElements = {};
|
|
uint32_t maxUniformBufferRange = {};
|
|
uint32_t maxStorageBufferRange = {};
|
|
uint32_t maxPushConstantsSize = {};
|
|
uint32_t maxMemoryAllocationCount = {};
|
|
uint32_t maxSamplerAllocationCount = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize = {};
|
|
uint32_t maxBoundDescriptorSets = {};
|
|
uint32_t maxPerStageDescriptorSamplers = {};
|
|
uint32_t maxPerStageDescriptorUniformBuffers = {};
|
|
uint32_t maxPerStageDescriptorStorageBuffers = {};
|
|
uint32_t maxPerStageDescriptorSampledImages = {};
|
|
uint32_t maxPerStageDescriptorStorageImages = {};
|
|
uint32_t maxPerStageDescriptorInputAttachments = {};
|
|
uint32_t maxPerStageResources = {};
|
|
uint32_t maxDescriptorSetSamplers = {};
|
|
uint32_t maxDescriptorSetUniformBuffers = {};
|
|
uint32_t maxDescriptorSetUniformBuffersDynamic = {};
|
|
uint32_t maxDescriptorSetStorageBuffers = {};
|
|
uint32_t maxDescriptorSetStorageBuffersDynamic = {};
|
|
uint32_t maxDescriptorSetSampledImages = {};
|
|
uint32_t maxDescriptorSetStorageImages = {};
|
|
uint32_t maxDescriptorSetInputAttachments = {};
|
|
uint32_t maxVertexInputAttributes = {};
|
|
uint32_t maxVertexInputBindings = {};
|
|
uint32_t maxVertexInputAttributeOffset = {};
|
|
uint32_t maxVertexInputBindingStride = {};
|
|
uint32_t maxVertexOutputComponents = {};
|
|
uint32_t maxTessellationGenerationLevel = {};
|
|
uint32_t maxTessellationPatchSize = {};
|
|
uint32_t maxTessellationControlPerVertexInputComponents = {};
|
|
uint32_t maxTessellationControlPerVertexOutputComponents = {};
|
|
uint32_t maxTessellationControlPerPatchOutputComponents = {};
|
|
uint32_t maxTessellationControlTotalOutputComponents = {};
|
|
uint32_t maxTessellationEvaluationInputComponents = {};
|
|
uint32_t maxTessellationEvaluationOutputComponents = {};
|
|
uint32_t maxGeometryShaderInvocations = {};
|
|
uint32_t maxGeometryInputComponents = {};
|
|
uint32_t maxGeometryOutputComponents = {};
|
|
uint32_t maxGeometryOutputVertices = {};
|
|
uint32_t maxGeometryTotalOutputComponents = {};
|
|
uint32_t maxFragmentInputComponents = {};
|
|
uint32_t maxFragmentOutputAttachments = {};
|
|
uint32_t maxFragmentDualSrcAttachments = {};
|
|
uint32_t maxFragmentCombinedOutputResources = {};
|
|
uint32_t maxComputeSharedMemorySize = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxComputeWorkGroupCount = {};
|
|
uint32_t maxComputeWorkGroupInvocations = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxComputeWorkGroupSize = {};
|
|
uint32_t subPixelPrecisionBits = {};
|
|
uint32_t subTexelPrecisionBits = {};
|
|
uint32_t mipmapPrecisionBits = {};
|
|
uint32_t maxDrawIndexedIndexValue = {};
|
|
uint32_t maxDrawIndirectCount = {};
|
|
float maxSamplerLodBias = {};
|
|
float maxSamplerAnisotropy = {};
|
|
uint32_t maxViewports = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 2> maxViewportDimensions = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> viewportBoundsRange = {};
|
|
uint32_t viewportSubPixelBits = {};
|
|
size_t minMemoryMapAlignment = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment = {};
|
|
int32_t minTexelOffset = {};
|
|
uint32_t maxTexelOffset = {};
|
|
int32_t minTexelGatherOffset = {};
|
|
uint32_t maxTexelGatherOffset = {};
|
|
float minInterpolationOffset = {};
|
|
float maxInterpolationOffset = {};
|
|
uint32_t subPixelInterpolationOffsetBits = {};
|
|
uint32_t maxFramebufferWidth = {};
|
|
uint32_t maxFramebufferHeight = {};
|
|
uint32_t maxFramebufferLayers = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts = {};
|
|
uint32_t maxColorAttachments = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts = {};
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts = {};
|
|
uint32_t maxSampleMaskWords = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics = {};
|
|
float timestampPeriod = {};
|
|
uint32_t maxClipDistances = {};
|
|
uint32_t maxCullDistances = {};
|
|
uint32_t maxCombinedClipAndCullDistances = {};
|
|
uint32_t discreteQueuePriorities = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> pointSizeRange = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> lineWidthRange = {};
|
|
float pointSizeGranularity = {};
|
|
float lineWidthGranularity = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 strictLines = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits>::value, "PhysicalDeviceLimits is not nothrow_move_constructible!" );
|
|
|
|
struct PhysicalDeviceLineRasterizationFeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceLineRasterizationFeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: rectangularLines( rectangularLines_ ), bresenhamLines( bresenhamLines_ ), smoothLines( smoothLines_ ), stippledRectangularLines( stippledRectangularLines_ ), stippledBresenhamLines( stippledBresenhamLines_ ), stippledSmoothLines( stippledSmoothLines_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceLineRasterizationFeaturesEXT( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceLineRasterizationFeaturesEXT( *reinterpret_cast<PhysicalDeviceLineRasterizationFeaturesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceLineRasterizationFeaturesEXT & operator=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceLineRasterizationFeaturesEXT & operator=( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & setRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
rectangularLines = rectangularLines_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
bresenhamLines = bresenhamLines_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
smoothLines = smoothLines_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & setStippledRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stippledRectangularLines = stippledRectangularLines_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & setStippledBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stippledBresenhamLines = stippledBresenhamLines_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & setStippledSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stippledSmoothLines = stippledSmoothLines_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceLineRasterizationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationFeaturesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceLineRasterizationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceLineRasterizationFeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, rectangularLines, bresenhamLines, smoothLines, stippledRectangularLines, stippledBresenhamLines, stippledSmoothLines );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceLineRasterizationFeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( rectangularLines == rhs.rectangularLines )
|
|
&& ( bresenhamLines == rhs.bresenhamLines )
|
|
&& ( smoothLines == rhs.smoothLines )
|
|
&& ( stippledRectangularLines == rhs.stippledRectangularLines )
|
|
&& ( stippledBresenhamLines == rhs.stippledBresenhamLines )
|
|
&& ( stippledSmoothLines == rhs.stippledSmoothLines );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 rectangularLines = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 smoothLines = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationFeaturesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT>::value, "PhysicalDeviceLineRasterizationFeaturesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT>
|
|
{
|
|
using Type = PhysicalDeviceLineRasterizationFeaturesEXT;
|
|
};
|
|
|
|
struct PhysicalDeviceLineRasterizationPropertiesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceLineRasterizationPropertiesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT(uint32_t lineSubPixelPrecisionBits_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: lineSubPixelPrecisionBits( lineSubPixelPrecisionBits_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceLineRasterizationPropertiesEXT( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceLineRasterizationPropertiesEXT( *reinterpret_cast<PhysicalDeviceLineRasterizationPropertiesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceLineRasterizationPropertiesEXT & operator=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceLineRasterizationPropertiesEXT & operator=( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceLineRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationPropertiesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceLineRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceLineRasterizationPropertiesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, lineSubPixelPrecisionBits );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceLineRasterizationPropertiesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT;
|
|
void * pNext = {};
|
|
uint32_t lineSubPixelPrecisionBits = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationPropertiesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT>::value, "PhysicalDeviceLineRasterizationPropertiesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT>
|
|
{
|
|
using Type = PhysicalDeviceLineRasterizationPropertiesEXT;
|
|
};
|
|
|
|
struct PhysicalDeviceLinearColorAttachmentFeaturesNV
|
|
{
|
|
using NativeType = VkPhysicalDeviceLinearColorAttachmentFeaturesNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceLinearColorAttachmentFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: linearColorAttachment( linearColorAttachment_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceLinearColorAttachmentFeaturesNV( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceLinearColorAttachmentFeaturesNV( VkPhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceLinearColorAttachmentFeaturesNV( *reinterpret_cast<PhysicalDeviceLinearColorAttachmentFeaturesNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceLinearColorAttachmentFeaturesNV & operator=( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceLinearColorAttachmentFeaturesNV & operator=( VkPhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLinearColorAttachmentFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLinearColorAttachmentFeaturesNV & setLinearColorAttachment( VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
linearColorAttachment = linearColorAttachment_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceLinearColorAttachmentFeaturesNV*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceLinearColorAttachmentFeaturesNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, linearColorAttachment );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceLinearColorAttachmentFeaturesNV const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( linearColorAttachment == rhs.linearColorAttachment );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV ) == sizeof( VkPhysicalDeviceLinearColorAttachmentFeaturesNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV>::value, "PhysicalDeviceLinearColorAttachmentFeaturesNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV>
|
|
{
|
|
using Type = PhysicalDeviceLinearColorAttachmentFeaturesNV;
|
|
};
|
|
|
|
struct PhysicalDeviceMaintenance3Properties
|
|
{
|
|
using NativeType = VkPhysicalDeviceMaintenance3Properties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance3Properties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties(uint32_t maxPerSetDescriptors_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: maxPerSetDescriptors( maxPerSetDescriptors_ ), maxMemoryAllocationSize( maxMemoryAllocationSize_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMaintenance3Properties( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceMaintenance3Properties( *reinterpret_cast<PhysicalDeviceMaintenance3Properties const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceMaintenance3Properties & operator=( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMaintenance3Properties & operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceMaintenance3Properties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMaintenance3Properties*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceMaintenance3Properties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMaintenance3Properties*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maxPerSetDescriptors, maxMemoryAllocationSize );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceMaintenance3Properties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( maxPerSetDescriptors == rhs.maxPerSetDescriptors )
|
|
&& ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties;
|
|
void * pNext = {};
|
|
uint32_t maxPerSetDescriptors = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties ) == sizeof( VkPhysicalDeviceMaintenance3Properties ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties>::value, "PhysicalDeviceMaintenance3Properties is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance3Properties>
|
|
{
|
|
using Type = PhysicalDeviceMaintenance3Properties;
|
|
};
|
|
using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties;
|
|
|
|
struct PhysicalDeviceMaintenance4Features
|
|
{
|
|
using NativeType = VkPhysicalDeviceMaintenance4Features;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance4Features;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Features(VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: maintenance4( maintenance4_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Features( PhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMaintenance4Features( VkPhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceMaintenance4Features( *reinterpret_cast<PhysicalDeviceMaintenance4Features const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceMaintenance4Features & operator=( PhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMaintenance4Features & operator=( VkPhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features & setMaintenance4( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maintenance4 = maintenance4_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceMaintenance4Features const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMaintenance4Features*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceMaintenance4Features &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMaintenance4Features*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maintenance4 );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceMaintenance4Features const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMaintenance4Features const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( maintenance4 == rhs.maintenance4 );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMaintenance4Features const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance4Features;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 maintenance4 = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features ) == sizeof( VkPhysicalDeviceMaintenance4Features ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features>::value, "PhysicalDeviceMaintenance4Features is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance4Features>
|
|
{
|
|
using Type = PhysicalDeviceMaintenance4Features;
|
|
};
|
|
using PhysicalDeviceMaintenance4FeaturesKHR = PhysicalDeviceMaintenance4Features;
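  //=== usage sketch: requesting maintenance4 at device creation ===
  // Illustrative only, not produced by the registry generator; assumes an already filled
  // DeviceQueueCreateInfo and that the caller keeps both chained structs alive until
  // vkCreateDevice returns. Guarded by the hypothetical macro VULKAN_HPP_USAGE_SKETCHES,
  // which this header never defines.
#if defined( VULKAN_HPP_USAGE_SKETCHES )
  inline VULKAN_HPP_NAMESPACE::DeviceCreateInfo
    sketchChainMaintenance4( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features & maintenance4Features,
                             VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo const &        queueCreateInfo )
  {
    maintenance4Features.setMaintenance4( VK_TRUE );          // request the feature
    VULKAN_HPP_NAMESPACE::DeviceCreateInfo deviceCreateInfo;
    deviceCreateInfo.setQueueCreateInfoCount( 1 );
    deviceCreateInfo.setPQueueCreateInfos( &queueCreateInfo );
    deviceCreateInfo.setPNext( &maintenance4Features );       // chain the feature struct
    return deviceCreateInfo;
  }
#endif /*VULKAN_HPP_USAGE_SKETCHES*/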
struct PhysicalDeviceMaintenance4Properties
|
|
{
|
|
using NativeType = VkPhysicalDeviceMaintenance4Properties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance4Properties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Properties(VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: maxBufferSize( maxBufferSize_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Properties( PhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMaintenance4Properties( VkPhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceMaintenance4Properties( *reinterpret_cast<PhysicalDeviceMaintenance4Properties const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceMaintenance4Properties & operator=( PhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMaintenance4Properties & operator=( VkPhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceMaintenance4Properties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMaintenance4Properties*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceMaintenance4Properties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMaintenance4Properties*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maxBufferSize );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceMaintenance4Properties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMaintenance4Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( maxBufferSize == rhs.maxBufferSize );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMaintenance4Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance4Properties;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties ) == sizeof( VkPhysicalDeviceMaintenance4Properties ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties>::value, "PhysicalDeviceMaintenance4Properties is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance4Properties>
|
|
{
|
|
using Type = PhysicalDeviceMaintenance4Properties;
|
|
};
|
|
using PhysicalDeviceMaintenance4PropertiesKHR = PhysicalDeviceMaintenance4Properties;
|
|
|
|
struct PhysicalDeviceMemoryBudgetPropertiesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceMemoryBudgetPropertiesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT(std::array<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS> const & heapBudget_ = {}, std::array<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS> const & heapUsage_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: heapBudget( heapBudget_ ), heapUsage( heapUsage_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMemoryBudgetPropertiesEXT( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceMemoryBudgetPropertiesEXT( *reinterpret_cast<PhysicalDeviceMemoryBudgetPropertiesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMemoryBudgetPropertiesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceMemoryBudgetPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMemoryBudgetPropertiesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, heapBudget, heapUsage );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceMemoryBudgetPropertiesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( heapBudget == rhs.heapBudget )
|
|
&& ( heapUsage == rhs.heapUsage );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> heapBudget = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> heapUsage = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT ) == sizeof( VkPhysicalDeviceMemoryBudgetPropertiesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT>::value, "PhysicalDeviceMemoryBudgetPropertiesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT>
|
|
{
|
|
using Type = PhysicalDeviceMemoryBudgetPropertiesEXT;
|
|
};
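  //=== usage sketch: reading per-heap budget and usage ===
  // Illustrative only, not produced by the registry generator; assumes VK_EXT_memory_budget is
  // supported by the physical device and a complete vulkan.hpp translation unit. Guarded by the
  // hypothetical macro VULKAN_HPP_USAGE_SKETCHES, which is never defined here.
#if defined( VULKAN_HPP_USAGE_SKETCHES )
  inline void sketchDumpHeapBudgets( VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevice )
  {
    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT budgetProperties;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2         memoryProperties2;
    memoryProperties2.pNext = &budgetProperties;               // chain the EXT struct
    physicalDevice.getMemoryProperties2( &memoryProperties2 );
    uint32_t heapCount = memoryProperties2.memoryProperties.memoryHeapCount;
    for ( uint32_t i = 0; i < heapCount; ++i )
    {
      // only the first memoryHeapCount entries of heapBudget / heapUsage are meaningful
      VULKAN_HPP_NAMESPACE::DeviceSize budget = budgetProperties.heapBudget[i];
      VULKAN_HPP_NAMESPACE::DeviceSize usage  = budgetProperties.heapUsage[i];
      (void)budget;
      (void)usage;   // e.g. feed these into an allocator's per-heap limits
    }
  }
#endif /*VULKAN_HPP_USAGE_SKETCHES*/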
struct PhysicalDeviceMemoryPriorityFeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceMemoryPriorityFeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: memoryPriority( memoryPriority_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMemoryPriorityFeaturesEXT( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceMemoryPriorityFeaturesEXT( *reinterpret_cast<PhysicalDeviceMemoryPriorityFeaturesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT & setMemoryPriority( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memoryPriority = memoryPriority_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMemoryPriorityFeaturesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceMemoryPriorityFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMemoryPriorityFeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, memoryPriority );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceMemoryPriorityFeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( memoryPriority == rhs.memoryPriority );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 memoryPriority = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT ) == sizeof( VkPhysicalDeviceMemoryPriorityFeaturesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT>::value, "PhysicalDeviceMemoryPriorityFeaturesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT>
|
|
{
|
|
using Type = PhysicalDeviceMemoryPriorityFeaturesEXT;
|
|
};
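  //=== usage sketch: requesting a memory priority for an allocation ===
  // Illustrative only, not produced by the registry generator; assumes the memoryPriority
  // feature above has been enabled on the device, and that the caller keeps priorityInfo alive
  // until the allocation call. Guarded by the hypothetical macro VULKAN_HPP_USAGE_SKETCHES.
#if defined( VULKAN_HPP_USAGE_SKETCHES )
  inline VULKAN_HPP_NAMESPACE::MemoryAllocateInfo
    sketchPrioritizedAllocation( VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT & priorityInfo,
                                 VULKAN_HPP_NAMESPACE::DeviceSize                      size,
                                 uint32_t                                              memoryTypeIndex )
  {
    priorityInfo.setPriority( 1.0f );              // 0.0 = lowest, 1.0 = highest priority
    VULKAN_HPP_NAMESPACE::MemoryAllocateInfo allocateInfo;
    allocateInfo.setAllocationSize( size );
    allocateInfo.setMemoryTypeIndex( memoryTypeIndex );
    allocateInfo.setPNext( &priorityInfo );        // chain the EXT struct into the allocation
    return allocateInfo;                           // pass to Device::allocateMemory
  }
#endif /*VULKAN_HPP_USAGE_SKETCHES*/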
struct PhysicalDeviceMemoryProperties
|
|
{
|
|
using NativeType = VkPhysicalDeviceMemoryProperties;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties(uint32_t memoryTypeCount_ = {}, std::array<VULKAN_HPP_NAMESPACE::MemoryType,VK_MAX_MEMORY_TYPES> const & memoryTypes_ = {}, uint32_t memoryHeapCount_ = {}, std::array<VULKAN_HPP_NAMESPACE::MemoryHeap,VK_MAX_MEMORY_HEAPS> const & memoryHeaps_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: memoryTypeCount( memoryTypeCount_ ), memoryTypes( memoryTypes_ ), memoryHeapCount( memoryHeapCount_ ), memoryHeaps( memoryHeaps_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMemoryProperties( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceMemoryProperties( *reinterpret_cast<PhysicalDeviceMemoryProperties const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceMemoryProperties & operator=( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMemoryProperties & operator=( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceMemoryProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryType, VK_MAX_MEMORY_TYPES> const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryHeap, VK_MAX_MEMORY_HEAPS> const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( memoryTypeCount, memoryTypes, memoryHeapCount, memoryHeaps );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceMemoryProperties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( memoryTypeCount == rhs.memoryTypeCount )
|
|
&& ( memoryTypes == rhs.memoryTypes )
|
|
&& ( memoryHeapCount == rhs.memoryHeapCount )
|
|
&& ( memoryHeaps == rhs.memoryHeaps );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t memoryTypeCount = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryType, VK_MAX_MEMORY_TYPES> memoryTypes = {};
|
|
uint32_t memoryHeapCount = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryHeap, VK_MAX_MEMORY_HEAPS> memoryHeaps = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties>::value, "PhysicalDeviceMemoryProperties is not nothrow_move_constructible!" );
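  //=== usage sketch: picking a memory type from PhysicalDeviceMemoryProperties ===
  // Illustrative only, not produced by the registry generator. The classic search below matches
  // a memoryTypeBits mask (typically taken from a MemoryRequirements query) against requested
  // property flags. Guarded by the hypothetical macro VULKAN_HPP_USAGE_SKETCHES.
#if defined( VULKAN_HPP_USAGE_SKETCHES )
  inline uint32_t sketchFindMemoryType( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const & memoryProperties,
                                        uint32_t                                                     memoryTypeBits,
                                        VULKAN_HPP_NAMESPACE::MemoryPropertyFlags                    requiredFlags )
  {
    for ( uint32_t i = 0; i < memoryProperties.memoryTypeCount; ++i )
    {
      bool allowedByMask = ( memoryTypeBits & ( 1u << i ) ) != 0;
      bool hasFlags      = ( memoryProperties.memoryTypes[i].propertyFlags & requiredFlags ) == requiredFlags;
      if ( allowedByMask && hasFlags )
      {
        return i;   // usable as MemoryAllocateInfo::memoryTypeIndex
      }
    }
    return ~0u;     // no suitable memory type found
  }
#endif /*VULKAN_HPP_USAGE_SKETCHES*/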
struct PhysicalDeviceMemoryProperties2
|
|
{
|
|
using NativeType = VkPhysicalDeviceMemoryProperties2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryProperties2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: memoryProperties( memoryProperties_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceMemoryProperties2( *reinterpret_cast<PhysicalDeviceMemoryProperties2 const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceMemoryProperties2 & operator=( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMemoryProperties2 & operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceMemoryProperties2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties2*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, memoryProperties );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceMemoryProperties2 const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( memoryProperties == rhs.memoryProperties );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>::value, "PhysicalDeviceMemoryProperties2 is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryProperties2>
|
|
{
|
|
using Type = PhysicalDeviceMemoryProperties2;
|
|
};
|
|
using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2;
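  //=== usage sketch: querying memory properties through a StructureChain ===
  // Illustrative only, not produced by the registry generator; a sketch of the enhanced-mode
  // query that returns the whole pNext chain at once, assuming VK_EXT_memory_budget is supported
  // and a complete vulkan.hpp translation unit. Guarded by the hypothetical macro
  // VULKAN_HPP_USAGE_SKETCHES, which is never defined here.
#if defined( VULKAN_HPP_USAGE_SKETCHES )
  inline VULKAN_HPP_NAMESPACE::DeviceSize sketchQueryHeapZeroBudget( VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevice )
  {
    auto chain = physicalDevice.getMemoryProperties2< VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2,
                                                      VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT >();
    // get<T>() pulls an individual structure back out of the returned StructureChain
    return chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT>().heapBudget[0];
  }
#endif /*VULKAN_HPP_USAGE_SKETCHES*/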
struct PhysicalDeviceMeshShaderFeaturesNV
|
|
{
|
|
using NativeType = VkPhysicalDeviceMeshShaderFeaturesNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: taskShader( taskShader_ ), meshShader( meshShader_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceMeshShaderFeaturesNV( *reinterpret_cast<PhysicalDeviceMeshShaderFeaturesNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceMeshShaderFeaturesNV & operator=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMeshShaderFeaturesNV & operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setTaskShader( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
taskShader = taskShader_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setMeshShader( VULKAN_HPP_NAMESPACE::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
meshShader = meshShader_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceMeshShaderFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMeshShaderFeaturesNV*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceMeshShaderFeaturesNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, taskShader, meshShader );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceMeshShaderFeaturesNV const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( taskShader == rhs.taskShader )
|
|
&& ( meshShader == rhs.meshShader );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 taskShader = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 meshShader = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV>::value, "PhysicalDeviceMeshShaderFeaturesNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderFeaturesNV>
|
|
{
|
|
using Type = PhysicalDeviceMeshShaderFeaturesNV;
|
|
};
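  //=== usage sketch: requesting NV task and mesh shaders ===
  // Illustrative only, not produced by the registry generator; the feature struct is chained
  // into DeviceCreateInfo exactly like the other feature structs in this header, assuming
  // nothing else is chained yet. Guarded by the hypothetical macro VULKAN_HPP_USAGE_SKETCHES.
#if defined( VULKAN_HPP_USAGE_SKETCHES )
  inline void sketchChainMeshShaderFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV & meshShaderFeatures,
                                             VULKAN_HPP_NAMESPACE::DeviceCreateInfo &                   deviceCreateInfo )
  {
    meshShaderFeatures.setTaskShader( VK_TRUE ).setMeshShader( VK_TRUE );  // setters return *this, so calls chain
    deviceCreateInfo.setPNext( &meshShaderFeatures );                      // assumes an empty pNext chain so far
  }
#endif /*VULKAN_HPP_USAGE_SKETCHES*/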
struct PhysicalDeviceMeshShaderPropertiesNV
|
|
{
|
|
using NativeType = VkPhysicalDeviceMeshShaderPropertiesNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV(uint32_t maxDrawMeshTasksCount_ = {}, uint32_t maxTaskWorkGroupInvocations_ = {}, std::array<uint32_t,3> const & maxTaskWorkGroupSize_ = {}, uint32_t maxTaskTotalMemorySize_ = {}, uint32_t maxTaskOutputCount_ = {}, uint32_t maxMeshWorkGroupInvocations_ = {}, std::array<uint32_t,3> const & maxMeshWorkGroupSize_ = {}, uint32_t maxMeshTotalMemorySize_ = {}, uint32_t maxMeshOutputVertices_ = {}, uint32_t maxMeshOutputPrimitives_ = {}, uint32_t maxMeshMultiviewViewCount_ = {}, uint32_t meshOutputPerVertexGranularity_ = {}, uint32_t meshOutputPerPrimitiveGranularity_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: maxDrawMeshTasksCount( maxDrawMeshTasksCount_ ), maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ ), maxTaskWorkGroupSize( maxTaskWorkGroupSize_ ), maxTaskTotalMemorySize( maxTaskTotalMemorySize_ ), maxTaskOutputCount( maxTaskOutputCount_ ), maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ ), maxMeshWorkGroupSize( maxMeshWorkGroupSize_ ), maxMeshTotalMemorySize( maxMeshTotalMemorySize_ ), maxMeshOutputVertices( maxMeshOutputVertices_ ), maxMeshOutputPrimitives( maxMeshOutputPrimitives_ ), maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ ), meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ ), meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMeshShaderPropertiesNV( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceMeshShaderPropertiesNV( *reinterpret_cast<PhysicalDeviceMeshShaderPropertiesNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceMeshShaderPropertiesNV & operator=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMeshShaderPropertiesNV & operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceMeshShaderPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMeshShaderPropertiesNV*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceMeshShaderPropertiesNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maxDrawMeshTasksCount, maxTaskWorkGroupInvocations, maxTaskWorkGroupSize, maxTaskTotalMemorySize, maxTaskOutputCount, maxMeshWorkGroupInvocations, maxMeshWorkGroupSize, maxMeshTotalMemorySize, maxMeshOutputVertices, maxMeshOutputPrimitives, maxMeshMultiviewViewCount, meshOutputPerVertexGranularity, meshOutputPerPrimitiveGranularity );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceMeshShaderPropertiesNV const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount )
|
|
&& ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations )
|
|
&& ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize )
|
|
&& ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize )
|
|
&& ( maxTaskOutputCount == rhs.maxTaskOutputCount )
|
|
&& ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations )
|
|
&& ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize )
|
|
&& ( maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize )
|
|
&& ( maxMeshOutputVertices == rhs.maxMeshOutputVertices )
|
|
&& ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives )
|
|
&& ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount )
|
|
&& ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity )
|
|
&& ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
|
|
void * pNext = {};
|
|
uint32_t maxDrawMeshTasksCount = {};
|
|
uint32_t maxTaskWorkGroupInvocations = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxTaskWorkGroupSize = {};
|
|
uint32_t maxTaskTotalMemorySize = {};
|
|
uint32_t maxTaskOutputCount = {};
|
|
uint32_t maxMeshWorkGroupInvocations = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxMeshWorkGroupSize = {};
|
|
uint32_t maxMeshTotalMemorySize = {};
|
|
uint32_t maxMeshOutputVertices = {};
|
|
uint32_t maxMeshOutputPrimitives = {};
|
|
uint32_t maxMeshMultiviewViewCount = {};
|
|
uint32_t meshOutputPerVertexGranularity = {};
|
|
uint32_t meshOutputPerPrimitiveGranularity = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV>::value, "PhysicalDeviceMeshShaderPropertiesNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderPropertiesNV>
|
|
{
|
|
using Type = PhysicalDeviceMeshShaderPropertiesNV;
|
|
};
|
|
|
|
struct PhysicalDeviceMultiDrawFeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceMultiDrawFeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiDrawFeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 multiDraw_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: multiDraw( multiDraw_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawFeaturesEXT( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMultiDrawFeaturesEXT( VkPhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceMultiDrawFeaturesEXT( *reinterpret_cast<PhysicalDeviceMultiDrawFeaturesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceMultiDrawFeaturesEXT & operator=( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMultiDrawFeaturesEXT & operator=( VkPhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiDrawFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiDrawFeaturesEXT & setMultiDraw( VULKAN_HPP_NAMESPACE::Bool32 multiDraw_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
multiDraw = multiDraw_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceMultiDrawFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMultiDrawFeaturesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceMultiDrawFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMultiDrawFeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, multiDraw );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceMultiDrawFeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( multiDraw == rhs.multiDraw );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiDrawFeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 multiDraw = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT ) == sizeof( VkPhysicalDeviceMultiDrawFeaturesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT>::value, "PhysicalDeviceMultiDrawFeaturesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceMultiDrawFeaturesEXT>
|
|
{
|
|
using Type = PhysicalDeviceMultiDrawFeaturesEXT;
|
|
};
|
|
|
|
struct PhysicalDeviceMultiDrawPropertiesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceMultiDrawPropertiesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiDrawPropertiesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawPropertiesEXT(uint32_t maxMultiDrawCount_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: maxMultiDrawCount( maxMultiDrawCount_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawPropertiesEXT( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMultiDrawPropertiesEXT( VkPhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceMultiDrawPropertiesEXT( *reinterpret_cast<PhysicalDeviceMultiDrawPropertiesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceMultiDrawPropertiesEXT & operator=( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMultiDrawPropertiesEXT & operator=( VkPhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceMultiDrawPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMultiDrawPropertiesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceMultiDrawPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMultiDrawPropertiesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maxMultiDrawCount );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceMultiDrawPropertiesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( maxMultiDrawCount == rhs.maxMultiDrawCount );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiDrawPropertiesEXT;
|
|
void * pNext = {};
|
|
uint32_t maxMultiDrawCount = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT ) == sizeof( VkPhysicalDeviceMultiDrawPropertiesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT>::value, "PhysicalDeviceMultiDrawPropertiesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceMultiDrawPropertiesEXT>
|
|
{
|
|
using Type = PhysicalDeviceMultiDrawPropertiesEXT;
|
|
};
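  //=== usage sketch: honouring maxMultiDrawCount ===
  // Illustrative only, not produced by the registry generator. A single vkCmdDrawMultiEXT call
  // must not be handed more than maxMultiDrawCount entries, so a larger draw list has to be
  // split into batches. Guarded by the hypothetical macro VULKAN_HPP_USAGE_SKETCHES.
#if defined( VULKAN_HPP_USAGE_SKETCHES )
  inline uint32_t sketchMultiDrawBatchCount( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT const & multiDrawProperties,
                                             uint32_t                                                           drawCount )
  {
    uint32_t maxPerCall = multiDrawProperties.maxMultiDrawCount;
    // ceil( drawCount / maxPerCall ); a zero limit means the extension query was never filled in
    return ( maxPerCall == 0 ) ? 0 : ( drawCount + maxPerCall - 1 ) / maxPerCall;
  }
#endif /*VULKAN_HPP_USAGE_SKETCHES*/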
struct PhysicalDeviceMultiviewFeatures
|
|
{
|
|
using NativeType = VkPhysicalDeviceMultiviewFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures(VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: multiview( multiview_ ), multiviewGeometryShader( multiviewGeometryShader_ ), multiviewTessellationShader( multiviewTessellationShader_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceMultiviewFeatures( *reinterpret_cast<PhysicalDeviceMultiviewFeatures const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceMultiviewFeatures & operator=( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMultiviewFeatures & operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
multiview = multiview_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
multiviewGeometryShader = multiviewGeometryShader_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
multiviewTessellationShader = multiviewTessellationShader_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceMultiviewFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMultiviewFeatures*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceMultiviewFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMultiviewFeatures*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, multiview, multiviewGeometryShader, multiviewTessellationShader );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceMultiviewFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( multiview == rhs.multiview )
|
|
&& ( multiviewGeometryShader == rhs.multiviewGeometryShader )
|
|
&& ( multiviewTessellationShader == rhs.multiviewTessellationShader );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 multiview = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures ) == sizeof( VkPhysicalDeviceMultiviewFeatures ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures>::value, "PhysicalDeviceMultiviewFeatures is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewFeatures>
|
|
{
|
|
using Type = PhysicalDeviceMultiviewFeatures;
|
|
};
|
|
using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures;
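  //=== usage sketch: checking for multiview support ===
  // Illustrative only, not produced by the registry generator; a minimal check-then-request
  // pattern, assuming an existing PhysicalDevice and a complete vulkan.hpp translation unit.
  // Guarded by the hypothetical macro VULKAN_HPP_USAGE_SKETCHES, which is never defined here.
#if defined( VULKAN_HPP_USAGE_SKETCHES )
  inline bool sketchMultiviewSupported( VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevice )
  {
    VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures multiviewFeatures;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2         features2;
    features2.pNext = &multiviewFeatures;         // chain the query struct
    physicalDevice.getFeatures2( &features2 );    // the implementation fills in the Bool32 members
    // to enable it, chain a struct with multiview = VK_TRUE into DeviceCreateInfo::pNext
    return multiviewFeatures.multiview == VK_TRUE;
  }
#endif /*VULKAN_HPP_USAGE_SKETCHES*/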
struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
|
|
{
|
|
using NativeType = VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: perViewPositionAllComponents( perViewPositionAllComponents_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( *reinterpret_cast<PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, perViewPositionAllComponents );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( perViewPositionAllComponents == rhs.perViewPositionAllComponents );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) == sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>::value, "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>
|
|
{
|
|
using Type = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
|
|
};
|
|
|
|
struct PhysicalDeviceMultiviewProperties
|
|
{
|
|
using NativeType = VkPhysicalDeviceMultiviewProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties(uint32_t maxMultiviewViewCount_ = {}, uint32_t maxMultiviewInstanceIndex_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: maxMultiviewViewCount( maxMultiviewViewCount_ ), maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMultiviewProperties( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceMultiviewProperties( *reinterpret_cast<PhysicalDeviceMultiviewProperties const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceMultiviewProperties & operator=( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMultiviewProperties & operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceMultiviewProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMultiviewProperties*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceMultiviewProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMultiviewProperties*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maxMultiviewViewCount, maxMultiviewInstanceIndex );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceMultiviewProperties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( maxMultiviewViewCount == rhs.maxMultiviewViewCount )
|
|
&& ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties;
|
|
void * pNext = {};
|
|
uint32_t maxMultiviewViewCount = {};
|
|
uint32_t maxMultiviewInstanceIndex = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties ) == sizeof( VkPhysicalDeviceMultiviewProperties ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties>::value, "PhysicalDeviceMultiviewProperties is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewProperties>
|
|
{
|
|
using Type = PhysicalDeviceMultiviewProperties;
|
|
};
|
|
using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties;
|
|
|
|
struct PhysicalDeviceMutableDescriptorTypeFeaturesVALVE
|
|
{
|
|
using NativeType = VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesVALVE(VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: mutableDescriptorType( mutableDescriptorType_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesVALVE( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMutableDescriptorTypeFeaturesVALVE( VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceMutableDescriptorTypeFeaturesVALVE( *reinterpret_cast<PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceMutableDescriptorTypeFeaturesVALVE & operator=( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceMutableDescriptorTypeFeaturesVALVE & operator=( VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesVALVE & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesVALVE & setMutableDescriptorType( VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
mutableDescriptorType = mutableDescriptorType_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, mutableDescriptorType );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( mutableDescriptorType == rhs.mutableDescriptorType );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesVALVE ) == sizeof( VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesVALVE>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesVALVE>::value, "PhysicalDeviceMutableDescriptorTypeFeaturesVALVE is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE>
|
|
{
|
|
using Type = PhysicalDeviceMutableDescriptorTypeFeaturesVALVE;
|
|
};
|
|
|
|
struct PhysicalDevicePCIBusInfoPropertiesEXT
|
|
{
|
|
using NativeType = VkPhysicalDevicePCIBusInfoPropertiesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT(uint32_t pciDomain_ = {}, uint32_t pciBus_ = {}, uint32_t pciDevice_ = {}, uint32_t pciFunction_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: pciDomain( pciDomain_ ), pciBus( pciBus_ ), pciDevice( pciDevice_ ), pciFunction( pciFunction_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePCIBusInfoPropertiesEXT( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDevicePCIBusInfoPropertiesEXT( *reinterpret_cast<PhysicalDevicePCIBusInfoPropertiesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDevicePCIBusInfoPropertiesEXT & operator=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePCIBusInfoPropertiesEXT & operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDevicePCIBusInfoPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDevicePCIBusInfoPropertiesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDevicePCIBusInfoPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDevicePCIBusInfoPropertiesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pciDomain, pciBus, pciDevice, pciFunction );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDevicePCIBusInfoPropertiesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( pciDomain == rhs.pciDomain )
|
|
&& ( pciBus == rhs.pciBus )
|
|
&& ( pciDevice == rhs.pciDevice )
|
|
&& ( pciFunction == rhs.pciFunction );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
|
|
void * pNext = {};
|
|
uint32_t pciDomain = {};
|
|
uint32_t pciBus = {};
|
|
uint32_t pciDevice = {};
|
|
uint32_t pciFunction = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT ) == sizeof( VkPhysicalDevicePCIBusInfoPropertiesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT>::value, "PhysicalDevicePCIBusInfoPropertiesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDevicePciBusInfoPropertiesEXT>
|
|
{
|
|
using Type = PhysicalDevicePCIBusInfoPropertiesEXT;
|
|
};
|
|
|
|
struct PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: pageableDeviceLocalMemory( pageableDeviceLocalMemory_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( *reinterpret_cast<PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & operator=( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & operator=( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & setPageableDeviceLocalMemory( VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pageableDeviceLocalMemory = pageableDeviceLocalMemory_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pageableDeviceLocalMemory );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( pageableDeviceLocalMemory == rhs.pageableDeviceLocalMemory );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT ) == sizeof( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT>::value, "PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT>
|
|
{
|
|
using Type = PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;
|
|
};
|
|
|
|
struct PhysicalDevicePerformanceQueryFeaturesKHR
|
|
{
|
|
using NativeType = VkPhysicalDevicePerformanceQueryFeaturesKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ = {}, VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: performanceCounterQueryPools( performanceCounterQueryPools_ ), performanceCounterMultipleQueryPools( performanceCounterMultipleQueryPools_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePerformanceQueryFeaturesKHR( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDevicePerformanceQueryFeaturesKHR( *reinterpret_cast<PhysicalDevicePerformanceQueryFeaturesKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDevicePerformanceQueryFeaturesKHR & operator=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePerformanceQueryFeaturesKHR & operator=( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
performanceCounterQueryPools = performanceCounterQueryPools_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterMultipleQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
performanceCounterMultipleQueryPools = performanceCounterMultipleQueryPools_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDevicePerformanceQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryFeaturesKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDevicePerformanceQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDevicePerformanceQueryFeaturesKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, performanceCounterQueryPools, performanceCounterMultipleQueryPools );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDevicePerformanceQueryFeaturesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( performanceCounterQueryPools == rhs.performanceCounterQueryPools )
|
|
&& ( performanceCounterMultipleQueryPools == rhs.performanceCounterMultipleQueryPools );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR ) == sizeof( VkPhysicalDevicePerformanceQueryFeaturesKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR>::value, "PhysicalDevicePerformanceQueryFeaturesKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR>
|
|
{
|
|
using Type = PhysicalDevicePerformanceQueryFeaturesKHR;
|
|
};
|
|
|
|
struct PhysicalDevicePerformanceQueryPropertiesKHR
|
|
{
|
|
using NativeType = VkPhysicalDevicePerformanceQueryPropertiesKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR(VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: allowCommandBufferQueryCopies( allowCommandBufferQueryCopies_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePerformanceQueryPropertiesKHR( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDevicePerformanceQueryPropertiesKHR( *reinterpret_cast<PhysicalDevicePerformanceQueryPropertiesKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDevicePerformanceQueryPropertiesKHR & operator=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePerformanceQueryPropertiesKHR & operator=( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDevicePerformanceQueryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryPropertiesKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDevicePerformanceQueryPropertiesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDevicePerformanceQueryPropertiesKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, allowCommandBufferQueryCopies );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDevicePerformanceQueryPropertiesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( allowCommandBufferQueryCopies == rhs.allowCommandBufferQueryCopies );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR ) == sizeof( VkPhysicalDevicePerformanceQueryPropertiesKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR>::value, "PhysicalDevicePerformanceQueryPropertiesKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR>
|
|
{
|
|
using Type = PhysicalDevicePerformanceQueryPropertiesKHR;
|
|
};
|
|
|
|
struct PhysicalDevicePipelineCreationCacheControlFeatures
|
|
{
|
|
using NativeType = VkPhysicalDevicePipelineCreationCacheControlFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeatures(VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: pipelineCreationCacheControl( pipelineCreationCacheControl_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeatures( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePipelineCreationCacheControlFeatures( VkPhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDevicePipelineCreationCacheControlFeatures( *reinterpret_cast<PhysicalDevicePipelineCreationCacheControlFeatures const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDevicePipelineCreationCacheControlFeatures & operator=( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePipelineCreationCacheControlFeatures & operator=( VkPhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures & setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineCreationCacheControl = pipelineCreationCacheControl_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDevicePipelineCreationCacheControlFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDevicePipelineCreationCacheControlFeatures*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDevicePipelineCreationCacheControlFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDevicePipelineCreationCacheControlFeatures*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pipelineCreationCacheControl );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDevicePipelineCreationCacheControlFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures ) == sizeof( VkPhysicalDevicePipelineCreationCacheControlFeatures ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures>::value, "PhysicalDevicePipelineCreationCacheControlFeatures is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures>
|
|
{
|
|
using Type = PhysicalDevicePipelineCreationCacheControlFeatures;
|
|
};
|
|
using PhysicalDevicePipelineCreationCacheControlFeaturesEXT = PhysicalDevicePipelineCreationCacheControlFeatures;
|
|
|
|
struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR
|
|
{
|
|
using NativeType = VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: pipelineExecutableInfo( pipelineExecutableInfo_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( *reinterpret_cast<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPipelineExecutableInfo( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineExecutableInfo = pipelineExecutableInfo_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pipelineExecutableInfo );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( pipelineExecutableInfo == rhs.pipelineExecutableInfo );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) == sizeof( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>::value, "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR>
|
|
{
|
|
using Type = PhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
|
|
};
|
|
|
|
struct PhysicalDevicePointClippingProperties
|
|
{
|
|
using NativeType = VkPhysicalDevicePointClippingProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePointClippingProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties(VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes) VULKAN_HPP_NOEXCEPT
|
|
: pointClippingBehavior( pointClippingBehavior_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePointClippingProperties( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDevicePointClippingProperties( *reinterpret_cast<PhysicalDevicePointClippingProperties const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDevicePointClippingProperties & operator=( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePointClippingProperties & operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDevicePointClippingProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDevicePointClippingProperties*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDevicePointClippingProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDevicePointClippingProperties*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PointClippingBehavior const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pointClippingBehavior );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDevicePointClippingProperties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( pointClippingBehavior == rhs.pointClippingBehavior );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePointClippingProperties;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties ) == sizeof( VkPhysicalDevicePointClippingProperties ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties>::value, "PhysicalDevicePointClippingProperties is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDevicePointClippingProperties>
|
|
{
|
|
using Type = PhysicalDevicePointClippingProperties;
|
|
};
|
|
using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties;
|
|
|
|
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct PhysicalDevicePortabilitySubsetFeaturesKHR
|
|
{
|
|
using NativeType = VkPhysicalDevicePortabilitySubsetFeaturesKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ = {}, VULKAN_HPP_NAMESPACE::Bool32 events_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ = {}, VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ = {}, VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ = {}, VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: constantAlphaColorBlendFactors( constantAlphaColorBlendFactors_ ), events( events_ ), imageViewFormatReinterpretation( imageViewFormatReinterpretation_ ), imageViewFormatSwizzle( imageViewFormatSwizzle_ ), imageView2DOn3DImage( imageView2DOn3DImage_ ), multisampleArrayImage( multisampleArrayImage_ ), mutableComparisonSamplers( mutableComparisonSamplers_ ), pointPolygons( pointPolygons_ ), samplerMipLodBias( samplerMipLodBias_ ), separateStencilMaskRef( separateStencilMaskRef_ ), shaderSampleRateInterpolationFunctions( shaderSampleRateInterpolationFunctions_ ), tessellationIsolines( tessellationIsolines_ ), tessellationPointMode( tessellationPointMode_ ), triangleFans( triangleFans_ ), vertexAttributeAccessBeyondStride( vertexAttributeAccessBeyondStride_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePortabilitySubsetFeaturesKHR( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDevicePortabilitySubsetFeaturesKHR( *reinterpret_cast<PhysicalDevicePortabilitySubsetFeaturesKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setConstantAlphaColorBlendFactors( VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
constantAlphaColorBlendFactors = constantAlphaColorBlendFactors_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setEvents( VULKAN_HPP_NAMESPACE::Bool32 events_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
events = events_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setImageViewFormatReinterpretation( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageViewFormatReinterpretation = imageViewFormatReinterpretation_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setImageViewFormatSwizzle( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageViewFormatSwizzle = imageViewFormatSwizzle_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setImageView2DOn3DImage( VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageView2DOn3DImage = imageView2DOn3DImage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setMultisampleArrayImage( VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
multisampleArrayImage = multisampleArrayImage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setMutableComparisonSamplers( VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
mutableComparisonSamplers = mutableComparisonSamplers_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setPointPolygons( VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pointPolygons = pointPolygons_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setSamplerMipLodBias( VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
samplerMipLodBias = samplerMipLodBias_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setSeparateStencilMaskRef( VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
separateStencilMaskRef = separateStencilMaskRef_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setShaderSampleRateInterpolationFunctions( VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderSampleRateInterpolationFunctions = shaderSampleRateInterpolationFunctions_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setTessellationIsolines( VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
tessellationIsolines = tessellationIsolines_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setTessellationPointMode( VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
tessellationPointMode = tessellationPointMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setTriangleFans( VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
triangleFans = triangleFans_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setVertexAttributeAccessBeyondStride( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
vertexAttributeAccessBeyondStride = vertexAttributeAccessBeyondStride_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDevicePortabilitySubsetFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDevicePortabilitySubsetFeaturesKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDevicePortabilitySubsetFeaturesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDevicePortabilitySubsetFeaturesKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, constantAlphaColorBlendFactors, events, imageViewFormatReinterpretation, imageViewFormatSwizzle, imageView2DOn3DImage, multisampleArrayImage, mutableComparisonSamplers, pointPolygons, samplerMipLodBias, separateStencilMaskRef, shaderSampleRateInterpolationFunctions, tessellationIsolines, tessellationPointMode, triangleFans, vertexAttributeAccessBeyondStride );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDevicePortabilitySubsetFeaturesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( constantAlphaColorBlendFactors == rhs.constantAlphaColorBlendFactors )
|
|
&& ( events == rhs.events )
|
|
&& ( imageViewFormatReinterpretation == rhs.imageViewFormatReinterpretation )
|
|
&& ( imageViewFormatSwizzle == rhs.imageViewFormatSwizzle )
|
|
&& ( imageView2DOn3DImage == rhs.imageView2DOn3DImage )
|
|
&& ( multisampleArrayImage == rhs.multisampleArrayImage )
|
|
&& ( mutableComparisonSamplers == rhs.mutableComparisonSamplers )
|
|
&& ( pointPolygons == rhs.pointPolygons )
|
|
&& ( samplerMipLodBias == rhs.samplerMipLodBias )
|
|
&& ( separateStencilMaskRef == rhs.separateStencilMaskRef )
|
|
&& ( shaderSampleRateInterpolationFunctions == rhs.shaderSampleRateInterpolationFunctions )
|
|
&& ( tessellationIsolines == rhs.tessellationIsolines )
|
|
&& ( tessellationPointMode == rhs.tessellationPointMode )
|
|
&& ( triangleFans == rhs.triangleFans )
|
|
&& ( vertexAttributeAccessBeyondStride == rhs.vertexAttributeAccessBeyondStride );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 events = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 pointPolygons = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 triangleFans = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR ) == sizeof( VkPhysicalDevicePortabilitySubsetFeaturesKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR>::value, "PhysicalDevicePortabilitySubsetFeaturesKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR>
|
|
{
|
|
using Type = PhysicalDevicePortabilitySubsetFeaturesKHR;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
|
|
|
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct PhysicalDevicePortabilitySubsetPropertiesKHR
|
|
{
|
|
using NativeType = VkPhysicalDevicePortabilitySubsetPropertiesKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR(uint32_t minVertexInputBindingStrideAlignment_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: minVertexInputBindingStrideAlignment( minVertexInputBindingStrideAlignment_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePortabilitySubsetPropertiesKHR( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDevicePortabilitySubsetPropertiesKHR( *reinterpret_cast<PhysicalDevicePortabilitySubsetPropertiesKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetPropertiesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetPropertiesKHR & setMinVertexInputBindingStrideAlignment( uint32_t minVertexInputBindingStrideAlignment_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
minVertexInputBindingStrideAlignment = minVertexInputBindingStrideAlignment_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDevicePortabilitySubsetPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDevicePortabilitySubsetPropertiesKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDevicePortabilitySubsetPropertiesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDevicePortabilitySubsetPropertiesKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, minVertexInputBindingStrideAlignment );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDevicePortabilitySubsetPropertiesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( minVertexInputBindingStrideAlignment == rhs.minVertexInputBindingStrideAlignment );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR;
|
|
void * pNext = {};
|
|
uint32_t minVertexInputBindingStrideAlignment = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR ) == sizeof( VkPhysicalDevicePortabilitySubsetPropertiesKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR>::value, "PhysicalDevicePortabilitySubsetPropertiesKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR>
|
|
{
|
|
using Type = PhysicalDevicePortabilitySubsetPropertiesKHR;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
|
|
|
struct PhysicalDevicePresentIdFeaturesKHR
|
|
{
|
|
using NativeType = VkPhysicalDevicePresentIdFeaturesKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentIdFeaturesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePresentIdFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 presentId_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: presentId( presentId_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePresentIdFeaturesKHR( PhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePresentIdFeaturesKHR( VkPhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDevicePresentIdFeaturesKHR( *reinterpret_cast<PhysicalDevicePresentIdFeaturesKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDevicePresentIdFeaturesKHR & operator=( PhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePresentIdFeaturesKHR & operator=( VkPhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentIdFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentIdFeaturesKHR & setPresentId( VULKAN_HPP_NAMESPACE::Bool32 presentId_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
presentId = presentId_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDevicePresentIdFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDevicePresentIdFeaturesKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDevicePresentIdFeaturesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDevicePresentIdFeaturesKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, presentId );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDevicePresentIdFeaturesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDevicePresentIdFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( presentId == rhs.presentId );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDevicePresentIdFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentIdFeaturesKHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 presentId = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR ) == sizeof( VkPhysicalDevicePresentIdFeaturesKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR>::value, "PhysicalDevicePresentIdFeaturesKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDevicePresentIdFeaturesKHR>
|
|
{
|
|
using Type = PhysicalDevicePresentIdFeaturesKHR;
|
|
};
|
|
|
|
struct PhysicalDevicePresentWaitFeaturesKHR
|
|
{
|
|
using NativeType = VkPhysicalDevicePresentWaitFeaturesKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentWaitFeaturesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWaitFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 presentWait_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: presentWait( presentWait_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWaitFeaturesKHR( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePresentWaitFeaturesKHR( VkPhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDevicePresentWaitFeaturesKHR( *reinterpret_cast<PhysicalDevicePresentWaitFeaturesKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDevicePresentWaitFeaturesKHR & operator=( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePresentWaitFeaturesKHR & operator=( VkPhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWaitFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWaitFeaturesKHR & setPresentWait( VULKAN_HPP_NAMESPACE::Bool32 presentWait_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
presentWait = presentWait_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDevicePresentWaitFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDevicePresentWaitFeaturesKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDevicePresentWaitFeaturesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDevicePresentWaitFeaturesKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, presentWait );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDevicePresentWaitFeaturesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( presentWait == rhs.presentWait );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentWaitFeaturesKHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 presentWait = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR ) == sizeof( VkPhysicalDevicePresentWaitFeaturesKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR>::value, "PhysicalDevicePresentWaitFeaturesKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDevicePresentWaitFeaturesKHR>
|
|
{
|
|
using Type = PhysicalDevicePresentWaitFeaturesKHR;
|
|
};
|
|
|
|
struct PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: primitiveTopologyListRestart( primitiveTopologyListRestart_ ), primitiveTopologyPatchListRestart( primitiveTopologyPatchListRestart_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( *reinterpret_cast<PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & operator=( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & operator=( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & setPrimitiveTopologyListRestart( VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
primitiveTopologyListRestart = primitiveTopologyListRestart_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & setPrimitiveTopologyPatchListRestart( VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
primitiveTopologyPatchListRestart = primitiveTopologyPatchListRestart_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, primitiveTopologyListRestart, primitiveTopologyPatchListRestart );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( primitiveTopologyListRestart == rhs.primitiveTopologyListRestart )
|
|
&& ( primitiveTopologyPatchListRestart == rhs.primitiveTopologyPatchListRestart );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT ) == sizeof( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT>::value, "PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT>
|
|
{
|
|
using Type = PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
|
|
};
|
|
|
|
struct PhysicalDevicePrivateDataFeatures
|
|
{
|
|
using NativeType = VkPhysicalDevicePrivateDataFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrivateDataFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeatures(VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: privateData( privateData_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeatures( PhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePrivateDataFeatures( VkPhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDevicePrivateDataFeatures( *reinterpret_cast<PhysicalDevicePrivateDataFeatures const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDevicePrivateDataFeatures & operator=( PhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePrivateDataFeatures & operator=( VkPhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures & setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
privateData = privateData_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDevicePrivateDataFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDevicePrivateDataFeatures*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDevicePrivateDataFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDevicePrivateDataFeatures*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, privateData );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDevicePrivateDataFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDevicePrivateDataFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( privateData == rhs.privateData );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDevicePrivateDataFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrivateDataFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 privateData = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures ) == sizeof( VkPhysicalDevicePrivateDataFeatures ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures>::value, "PhysicalDevicePrivateDataFeatures is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDevicePrivateDataFeatures>
|
|
{
|
|
using Type = PhysicalDevicePrivateDataFeatures;
|
|
};
|
|
using PhysicalDevicePrivateDataFeaturesEXT = PhysicalDevicePrivateDataFeatures;
|
|
|
|
struct PhysicalDeviceSparseProperties
|
|
{
|
|
using NativeType = VkPhysicalDeviceSparseProperties;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties(VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: residencyStandard2DBlockShape( residencyStandard2DBlockShape_ ), residencyStandard2DMultisampleBlockShape( residencyStandard2DMultisampleBlockShape_ ), residencyStandard3DBlockShape( residencyStandard3DBlockShape_ ), residencyAlignedMipSize( residencyAlignedMipSize_ ), residencyNonResidentStrict( residencyNonResidentStrict_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceSparseProperties( *reinterpret_cast<PhysicalDeviceSparseProperties const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceSparseProperties & operator=( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceSparseProperties & operator=( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceSparseProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceSparseProperties*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceSparseProperties*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( residencyStandard2DBlockShape, residencyStandard2DMultisampleBlockShape, residencyStandard3DBlockShape, residencyAlignedMipSize, residencyNonResidentStrict );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceSparseProperties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape )
|
|
&& ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape )
|
|
&& ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape )
|
|
&& ( residencyAlignedMipSize == rhs.residencyAlignedMipSize )
|
|
&& ( residencyNonResidentStrict == rhs.residencyNonResidentStrict );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties>::value, "PhysicalDeviceSparseProperties is not nothrow_move_constructible!" );
|
|
|
|
struct PhysicalDeviceProperties
|
|
{
|
|
using NativeType = VkPhysicalDeviceProperties;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties(uint32_t apiVersion_ = {}, uint32_t driverVersion_ = {}, uint32_t vendorID_ = {}, uint32_t deviceID_ = {}, VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther, std::array<char,VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const & deviceName_ = {}, std::array<uint8_t,VK_UUID_SIZE> const & pipelineCacheUUID_ = {}, VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {}, VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: apiVersion( apiVersion_ ), driverVersion( driverVersion_ ), vendorID( vendorID_ ), deviceID( deviceID_ ), deviceType( deviceType_ ), deviceName( deviceName_ ), pipelineCacheUUID( pipelineCacheUUID_ ), limits( limits_ ), sparseProperties( sparseProperties_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceProperties( *reinterpret_cast<PhysicalDeviceProperties const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceProperties & operator=( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceProperties & operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceProperties*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceProperties*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceType const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( apiVersion, driverVersion, vendorID, deviceID, deviceType, deviceName, pipelineCacheUUID, limits, sparseProperties );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceProperties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( apiVersion == rhs.apiVersion )
|
|
&& ( driverVersion == rhs.driverVersion )
|
|
&& ( vendorID == rhs.vendorID )
|
|
&& ( deviceID == rhs.deviceID )
|
|
&& ( deviceType == rhs.deviceType )
|
|
&& ( deviceName == rhs.deviceName )
|
|
&& ( pipelineCacheUUID == rhs.pipelineCacheUUID )
|
|
&& ( limits == rhs.limits )
|
|
&& ( sparseProperties == rhs.sparseProperties );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t apiVersion = {};
|
|
uint32_t driverVersion = {};
|
|
uint32_t vendorID = {};
|
|
uint32_t deviceID = {};
|
|
VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther;
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> deviceName = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> pipelineCacheUUID = {};
|
|
VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits = {};
|
|
VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties>::value, "PhysicalDeviceProperties is not nothrow_move_constructible!" );
|
|
|
|
struct PhysicalDeviceProperties2
|
|
{
|
|
using NativeType = VkPhysicalDeviceProperties2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProperties2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2(VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: properties( properties_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceProperties2( *reinterpret_cast<PhysicalDeviceProperties2 const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceProperties2 & operator=( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceProperties2 & operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceProperties2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceProperties2*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceProperties2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, properties );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceProperties2 const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( properties == rhs.properties );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProperties2;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>::value, "PhysicalDeviceProperties2 is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceProperties2>
|
|
{
|
|
using Type = PhysicalDeviceProperties2;
|
|
};
|
|
using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2;
|
|
|
|
struct PhysicalDeviceProtectedMemoryFeatures
|
|
{
|
|
using NativeType = VkPhysicalDeviceProtectedMemoryFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures(VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: protectedMemory( protectedMemory_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceProtectedMemoryFeatures( *reinterpret_cast<PhysicalDeviceProtectedMemoryFeatures const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceProtectedMemoryFeatures & operator=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceProtectedMemoryFeatures & operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
protectedMemory = protectedMemory_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceProtectedMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryFeatures*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceProtectedMemoryFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryFeatures*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, protectedMemory );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceProtectedMemoryFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( protectedMemory == rhs.protectedMemory );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures ) == sizeof( VkPhysicalDeviceProtectedMemoryFeatures ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures>::value, "PhysicalDeviceProtectedMemoryFeatures is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryFeatures>
|
|
{
|
|
using Type = PhysicalDeviceProtectedMemoryFeatures;
|
|
};
|
|
|
|
struct PhysicalDeviceProtectedMemoryProperties
|
|
{
|
|
using NativeType = VkPhysicalDeviceProtectedMemoryProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties(VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: protectedNoFault( protectedNoFault_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceProtectedMemoryProperties( *reinterpret_cast<PhysicalDeviceProtectedMemoryProperties const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceProtectedMemoryProperties & operator=( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceProtectedMemoryProperties & operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceProtectedMemoryProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryProperties*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceProtectedMemoryProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, protectedNoFault );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceProtectedMemoryProperties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( protectedNoFault == rhs.protectedNoFault );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties ) == sizeof( VkPhysicalDeviceProtectedMemoryProperties ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties>::value, "PhysicalDeviceProtectedMemoryProperties is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryProperties>
|
|
{
|
|
using Type = PhysicalDeviceProtectedMemoryProperties;
|
|
};
|
|
|
|
struct PhysicalDeviceProvokingVertexFeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceProvokingVertexFeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: provokingVertexLast( provokingVertexLast_ ), transformFeedbackPreservesProvokingVertex( transformFeedbackPreservesProvokingVertex_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceProvokingVertexFeaturesEXT( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceProvokingVertexFeaturesEXT( *reinterpret_cast<PhysicalDeviceProvokingVertexFeaturesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceProvokingVertexFeaturesEXT & operator=( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceProvokingVertexFeaturesEXT & operator=( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & setProvokingVertexLast( VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
provokingVertexLast = provokingVertexLast_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & setTransformFeedbackPreservesProvokingVertex( VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
transformFeedbackPreservesProvokingVertex = transformFeedbackPreservesProvokingVertex_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceProvokingVertexFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceProvokingVertexFeaturesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceProvokingVertexFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceProvokingVertexFeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, provokingVertexLast, transformFeedbackPreservesProvokingVertex );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceProvokingVertexFeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( provokingVertexLast == rhs.provokingVertexLast )
|
|
&& ( transformFeedbackPreservesProvokingVertex == rhs.transformFeedbackPreservesProvokingVertex );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT ) == sizeof( VkPhysicalDeviceProvokingVertexFeaturesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT>::value, "PhysicalDeviceProvokingVertexFeaturesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT>
|
|
{
|
|
using Type = PhysicalDeviceProvokingVertexFeaturesEXT;
|
|
};
|
|
|
|
struct PhysicalDeviceProvokingVertexPropertiesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceProvokingVertexPropertiesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 provokingVertexModePerPipeline_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesTriangleFanProvokingVertex_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: provokingVertexModePerPipeline( provokingVertexModePerPipeline_ ), transformFeedbackPreservesTriangleFanProvokingVertex( transformFeedbackPreservesTriangleFanProvokingVertex_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceProvokingVertexPropertiesEXT( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceProvokingVertexPropertiesEXT( *reinterpret_cast<PhysicalDeviceProvokingVertexPropertiesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceProvokingVertexPropertiesEXT & operator=( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceProvokingVertexPropertiesEXT & operator=( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceProvokingVertexPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceProvokingVertexPropertiesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceProvokingVertexPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceProvokingVertexPropertiesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, provokingVertexModePerPipeline, transformFeedbackPreservesTriangleFanProvokingVertex );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceProvokingVertexPropertiesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( provokingVertexModePerPipeline == rhs.provokingVertexModePerPipeline )
|
|
&& ( transformFeedbackPreservesTriangleFanProvokingVertex == rhs.transformFeedbackPreservesTriangleFanProvokingVertex );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 provokingVertexModePerPipeline = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesTriangleFanProvokingVertex = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT ) == sizeof( VkPhysicalDeviceProvokingVertexPropertiesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT>::value, "PhysicalDeviceProvokingVertexPropertiesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT>
|
|
{
|
|
using Type = PhysicalDeviceProvokingVertexPropertiesEXT;
|
|
};
|
|
|
|
struct PhysicalDevicePushDescriptorPropertiesKHR
|
|
{
|
|
using NativeType = VkPhysicalDevicePushDescriptorPropertiesKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR(uint32_t maxPushDescriptors_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: maxPushDescriptors( maxPushDescriptors_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDevicePushDescriptorPropertiesKHR( *reinterpret_cast<PhysicalDevicePushDescriptorPropertiesKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDevicePushDescriptorPropertiesKHR & operator=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDevicePushDescriptorPropertiesKHR & operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDevicePushDescriptorPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDevicePushDescriptorPropertiesKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDevicePushDescriptorPropertiesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDevicePushDescriptorPropertiesKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maxPushDescriptors );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDevicePushDescriptorPropertiesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( maxPushDescriptors == rhs.maxPushDescriptors );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
|
|
void * pNext = {};
|
|
uint32_t maxPushDescriptors = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR ) == sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR>::value, "PhysicalDevicePushDescriptorPropertiesKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDevicePushDescriptorPropertiesKHR>
|
|
{
|
|
using Type = PhysicalDevicePushDescriptorPropertiesKHR;
|
|
};
|
|
|
|
struct PhysicalDeviceRGBA10X6FormatsFeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceRGBA10X6FormatsFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: formatRgba10x6WithoutYCbCrSampler( formatRgba10x6WithoutYCbCrSampler_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceRGBA10X6FormatsFeaturesEXT( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceRGBA10X6FormatsFeaturesEXT( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceRGBA10X6FormatsFeaturesEXT( *reinterpret_cast<PhysicalDeviceRGBA10X6FormatsFeaturesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceRGBA10X6FormatsFeaturesEXT & operator=( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceRGBA10X6FormatsFeaturesEXT & operator=( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRGBA10X6FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRGBA10X6FormatsFeaturesEXT & setFormatRgba10x6WithoutYCbCrSampler( VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
formatRgba10x6WithoutYCbCrSampler = formatRgba10x6WithoutYCbCrSampler_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, formatRgba10x6WithoutYCbCrSampler );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( formatRgba10x6WithoutYCbCrSampler == rhs.formatRgba10x6WithoutYCbCrSampler );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT ) == sizeof( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT>::value, "PhysicalDeviceRGBA10X6FormatsFeaturesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT>
|
|
{
|
|
using Type = PhysicalDeviceRGBA10X6FormatsFeaturesEXT;
|
|
};
|
|
|
|
struct PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM
|
|
{
|
|
using NativeType = VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM(VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: rasterizationOrderColorAttachmentAccess( rasterizationOrderColorAttachmentAccess_ ), rasterizationOrderDepthAttachmentAccess( rasterizationOrderDepthAttachmentAccess_ ), rasterizationOrderStencilAttachmentAccess( rasterizationOrderStencilAttachmentAccess_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM( *reinterpret_cast<PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM & operator=( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM & operator=( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM & setRasterizationOrderColorAttachmentAccess( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      rasterizationOrderColorAttachmentAccess = rasterizationOrderColorAttachmentAccess_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM & setRasterizationOrderDepthAttachmentAccess( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      rasterizationOrderDepthAttachmentAccess = rasterizationOrderDepthAttachmentAccess_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM & setRasterizationOrderStencilAttachmentAccess( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      rasterizationOrderStencilAttachmentAccess = rasterizationOrderStencilAttachmentAccess_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM*>( this );
    }

    explicit operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, rasterizationOrderColorAttachmentAccess, rasterizationOrderDepthAttachmentAccess, rasterizationOrderStencilAttachmentAccess );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const & ) const = default;
#else
    bool operator==( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( rasterizationOrderColorAttachmentAccess == rhs.rasterizationOrderColorAttachmentAccess )
          && ( rasterizationOrderDepthAttachmentAccess == rhs.rasterizationOrderDepthAttachmentAccess )
          && ( rasterizationOrderStencilAttachmentAccess == rhs.rasterizationOrderStencilAttachmentAccess );
#endif
    }

    bool operator!=( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess = {};
    VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess = {};
    VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM ) == sizeof( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM>::value, "PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM>
  {
    using Type = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM;
  };

  struct PhysicalDeviceRayQueryFeaturesKHR
  {
    using NativeType = VkPhysicalDeviceRayQueryFeaturesKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayQueryFeaturesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayQueryFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ = {}) VULKAN_HPP_NOEXCEPT
    : rayQuery( rayQuery_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayQueryFeaturesKHR( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRayQueryFeaturesKHR( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRayQueryFeaturesKHR( *reinterpret_cast<PhysicalDeviceRayQueryFeaturesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceRayQueryFeaturesKHR & operator=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRayQueryFeaturesKHR & operator=( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR & setRayQuery( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ ) VULKAN_HPP_NOEXCEPT
    {
      rayQuery = rayQuery_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceRayQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRayQueryFeaturesKHR*>( this );
    }

    explicit operator VkPhysicalDeviceRayQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRayQueryFeaturesKHR*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, rayQuery );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceRayQueryFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( rayQuery == rhs.rayQuery );
#endif
    }

    bool operator!=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayQueryFeaturesKHR;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 rayQuery = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR ) == sizeof( VkPhysicalDeviceRayQueryFeaturesKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR>::value, "PhysicalDeviceRayQueryFeaturesKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRayQueryFeaturesKHR>
  {
    using Type = PhysicalDeviceRayQueryFeaturesKHR;
  };

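  // Illustrative usage sketch (not part of the generated API): feature structs such as
  // PhysicalDeviceRayQueryFeaturesKHR are typically chained behind PhysicalDeviceFeatures2 and
  // filled in by getFeatures2(). `physicalDevice` is an assumed, already-initialized
  // vk::PhysicalDevice; the templated StructureChain overload shown here is the one vulkan.hpp
  // provides when Vulkan 1.1 or VK_KHR_get_physical_device_properties2 is available.
  //
  //   auto chain    = physicalDevice.getFeatures2<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2,
  //                                               VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR>();
  //   bool rayQuery = chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR>().rayQuery;
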
  struct PhysicalDeviceRayTracingMotionBlurFeaturesNV
  {
    using NativeType = VkPhysicalDeviceRayTracingMotionBlurFeaturesNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMotionBlurFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect_ = {}) VULKAN_HPP_NOEXCEPT
    : rayTracingMotionBlur( rayTracingMotionBlur_ ), rayTracingMotionBlurPipelineTraceRaysIndirect( rayTracingMotionBlurPipelineTraceRaysIndirect_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMotionBlurFeaturesNV( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRayTracingMotionBlurFeaturesNV( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRayTracingMotionBlurFeaturesNV( *reinterpret_cast<PhysicalDeviceRayTracingMotionBlurFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceRayTracingMotionBlurFeaturesNV & operator=( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRayTracingMotionBlurFeaturesNV & operator=( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & setRayTracingMotionBlur( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur_ ) VULKAN_HPP_NOEXCEPT
    {
      rayTracingMotionBlur = rayTracingMotionBlur_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & setRayTracingMotionBlurPipelineTraceRaysIndirect( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect_ ) VULKAN_HPP_NOEXCEPT
    {
      rayTracingMotionBlurPipelineTraceRaysIndirect = rayTracingMotionBlurPipelineTraceRaysIndirect_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRayTracingMotionBlurFeaturesNV*>( this );
    }

    explicit operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRayTracingMotionBlurFeaturesNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, rayTracingMotionBlur, rayTracingMotionBlurPipelineTraceRaysIndirect );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( rayTracingMotionBlur == rhs.rayTracingMotionBlur )
          && ( rayTracingMotionBlurPipelineTraceRaysIndirect == rhs.rayTracingMotionBlurPipelineTraceRaysIndirect );
#endif
    }

    bool operator!=( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur = {};
    VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV ) == sizeof( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV>::value, "PhysicalDeviceRayTracingMotionBlurFeaturesNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV>
  {
    using Type = PhysicalDeviceRayTracingMotionBlurFeaturesNV;
  };

  struct PhysicalDeviceRayTracingPipelineFeaturesKHR
  {
    using NativeType = VkPhysicalDeviceRayTracingPipelineFeaturesKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ = {}) VULKAN_HPP_NOEXCEPT
    : rayTracingPipeline( rayTracingPipeline_ ), rayTracingPipelineShaderGroupHandleCaptureReplay( rayTracingPipelineShaderGroupHandleCaptureReplay_ ), rayTracingPipelineShaderGroupHandleCaptureReplayMixed( rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ ), rayTracingPipelineTraceRaysIndirect( rayTracingPipelineTraceRaysIndirect_ ), rayTraversalPrimitiveCulling( rayTraversalPrimitiveCulling_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRayTracingPipelineFeaturesKHR( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRayTracingPipelineFeaturesKHR( *reinterpret_cast<PhysicalDeviceRayTracingPipelineFeaturesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceRayTracingPipelineFeaturesKHR & operator=( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRayTracingPipelineFeaturesKHR & operator=( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipeline( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ ) VULKAN_HPP_NOEXCEPT
    {
      rayTracingPipeline = rayTracingPipeline_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineShaderGroupHandleCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
    {
      rayTracingPipelineShaderGroupHandleCaptureReplay = rayTracingPipelineShaderGroupHandleCaptureReplay_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineShaderGroupHandleCaptureReplayMixed( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ ) VULKAN_HPP_NOEXCEPT
    {
      rayTracingPipelineShaderGroupHandleCaptureReplayMixed = rayTracingPipelineShaderGroupHandleCaptureReplayMixed_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineTraceRaysIndirect( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ ) VULKAN_HPP_NOEXCEPT
    {
      rayTracingPipelineTraceRaysIndirect = rayTracingPipelineTraceRaysIndirect_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTraversalPrimitiveCulling( VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ ) VULKAN_HPP_NOEXCEPT
    {
      rayTraversalPrimitiveCulling = rayTraversalPrimitiveCulling_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRayTracingPipelineFeaturesKHR*>( this );
    }

    explicit operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRayTracingPipelineFeaturesKHR*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, rayTracingPipeline, rayTracingPipelineShaderGroupHandleCaptureReplay, rayTracingPipelineShaderGroupHandleCaptureReplayMixed, rayTracingPipelineTraceRaysIndirect, rayTraversalPrimitiveCulling );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceRayTracingPipelineFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( rayTracingPipeline == rhs.rayTracingPipeline )
          && ( rayTracingPipelineShaderGroupHandleCaptureReplay == rhs.rayTracingPipelineShaderGroupHandleCaptureReplay )
          && ( rayTracingPipelineShaderGroupHandleCaptureReplayMixed == rhs.rayTracingPipelineShaderGroupHandleCaptureReplayMixed )
          && ( rayTracingPipelineTraceRaysIndirect == rhs.rayTracingPipelineTraceRaysIndirect )
          && ( rayTraversalPrimitiveCulling == rhs.rayTraversalPrimitiveCulling );
#endif
    }

    bool operator!=( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline = {};
    VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay = {};
    VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed = {};
    VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect = {};
    VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR ) == sizeof( VkPhysicalDeviceRayTracingPipelineFeaturesKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR>::value, "PhysicalDeviceRayTracingPipelineFeaturesKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR>
  {
    using Type = PhysicalDeviceRayTracingPipelineFeaturesKHR;
  };

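  // Illustrative usage sketch (not part of the generated API): to enable these features they are
  // usually chained into DeviceCreateInfo::pNext at device creation time. `physicalDevice` and
  // `queueCreateInfo` are assumed to exist elsewhere; only the chaining pattern is shown, and the
  // fluent setters used below are the ones generated in this struct.
  //
  //   VULKAN_HPP_NAMESPACE::StructureChain<VULKAN_HPP_NAMESPACE::DeviceCreateInfo,
  //                                        VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR>
  //     createChain{ VULKAN_HPP_NAMESPACE::DeviceCreateInfo{}.setQueueCreateInfos( queueCreateInfo ),
  //                  VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR{}.setRayTracingPipeline( VK_TRUE ) };
  //   auto device = physicalDevice.createDevice( createChain.get<VULKAN_HPP_NAMESPACE::DeviceCreateInfo>() );
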
  struct PhysicalDeviceRayTracingPipelinePropertiesKHR
  {
    using NativeType = VkPhysicalDeviceRayTracingPipelinePropertiesKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelinePropertiesKHR(uint32_t shaderGroupHandleSize_ = {}, uint32_t maxRayRecursionDepth_ = {}, uint32_t maxShaderGroupStride_ = {}, uint32_t shaderGroupBaseAlignment_ = {}, uint32_t shaderGroupHandleCaptureReplaySize_ = {}, uint32_t maxRayDispatchInvocationCount_ = {}, uint32_t shaderGroupHandleAlignment_ = {}, uint32_t maxRayHitAttributeSize_ = {}) VULKAN_HPP_NOEXCEPT
    : shaderGroupHandleSize( shaderGroupHandleSize_ ), maxRayRecursionDepth( maxRayRecursionDepth_ ), maxShaderGroupStride( maxShaderGroupStride_ ), shaderGroupBaseAlignment( shaderGroupBaseAlignment_ ), shaderGroupHandleCaptureReplaySize( shaderGroupHandleCaptureReplaySize_ ), maxRayDispatchInvocationCount( maxRayDispatchInvocationCount_ ), shaderGroupHandleAlignment( shaderGroupHandleAlignment_ ), maxRayHitAttributeSize( maxRayHitAttributeSize_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelinePropertiesKHR( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRayTracingPipelinePropertiesKHR( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRayTracingPipelinePropertiesKHR( *reinterpret_cast<PhysicalDeviceRayTracingPipelinePropertiesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceRayTracingPipelinePropertiesKHR & operator=( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRayTracingPipelinePropertiesKHR & operator=( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR const *>( &rhs );
      return *this;
    }

    explicit operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRayTracingPipelinePropertiesKHR*>( this );
    }

    explicit operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRayTracingPipelinePropertiesKHR*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderGroupHandleSize, maxRayRecursionDepth, maxShaderGroupStride, shaderGroupBaseAlignment, shaderGroupHandleCaptureReplaySize, maxRayDispatchInvocationCount, shaderGroupHandleAlignment, maxRayHitAttributeSize );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceRayTracingPipelinePropertiesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize )
          && ( maxRayRecursionDepth == rhs.maxRayRecursionDepth )
          && ( maxShaderGroupStride == rhs.maxShaderGroupStride )
          && ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment )
          && ( shaderGroupHandleCaptureReplaySize == rhs.shaderGroupHandleCaptureReplaySize )
          && ( maxRayDispatchInvocationCount == rhs.maxRayDispatchInvocationCount )
          && ( shaderGroupHandleAlignment == rhs.shaderGroupHandleAlignment )
          && ( maxRayHitAttributeSize == rhs.maxRayHitAttributeSize );
#endif
    }

    bool operator!=( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR;
    void * pNext = {};
    uint32_t shaderGroupHandleSize = {};
    uint32_t maxRayRecursionDepth = {};
    uint32_t maxShaderGroupStride = {};
    uint32_t shaderGroupBaseAlignment = {};
    uint32_t shaderGroupHandleCaptureReplaySize = {};
    uint32_t maxRayDispatchInvocationCount = {};
    uint32_t shaderGroupHandleAlignment = {};
    uint32_t maxRayHitAttributeSize = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR ) == sizeof( VkPhysicalDeviceRayTracingPipelinePropertiesKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR>::value, "PhysicalDeviceRayTracingPipelinePropertiesKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR>
  {
    using Type = PhysicalDeviceRayTracingPipelinePropertiesKHR;
  };

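  // Illustrative sketch (not part of the generated API): shader binding table strides are commonly
  // derived from these limits by rounding shaderGroupHandleSize up to shaderGroupHandleAlignment,
  // which in practice is a power of two. `physicalDevice` is an assumed vk::PhysicalDevice.
  //
  //   auto     props        = physicalDevice.getProperties2<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2,
  //                                                         VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR>()
  //                             .get<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR>();
  //   uint32_t handleStride = ( props.shaderGroupHandleSize + props.shaderGroupHandleAlignment - 1 )
  //                           & ~( props.shaderGroupHandleAlignment - 1 );
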
  struct PhysicalDeviceRayTracingPropertiesNV
  {
    using NativeType = VkPhysicalDeviceRayTracingPropertiesNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPropertiesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV(uint32_t shaderGroupHandleSize_ = {}, uint32_t maxRecursionDepth_ = {}, uint32_t maxShaderGroupStride_ = {}, uint32_t shaderGroupBaseAlignment_ = {}, uint64_t maxGeometryCount_ = {}, uint64_t maxInstanceCount_ = {}, uint64_t maxTriangleCount_ = {}, uint32_t maxDescriptorSetAccelerationStructures_ = {}) VULKAN_HPP_NOEXCEPT
    : shaderGroupHandleSize( shaderGroupHandleSize_ ), maxRecursionDepth( maxRecursionDepth_ ), maxShaderGroupStride( maxShaderGroupStride_ ), shaderGroupBaseAlignment( shaderGroupBaseAlignment_ ), maxGeometryCount( maxGeometryCount_ ), maxInstanceCount( maxInstanceCount_ ), maxTriangleCount( maxTriangleCount_ ), maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRayTracingPropertiesNV( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRayTracingPropertiesNV( *reinterpret_cast<PhysicalDeviceRayTracingPropertiesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceRayTracingPropertiesNV & operator=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRayTracingPropertiesNV & operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV const *>( &rhs );
      return *this;
    }

    explicit operator VkPhysicalDeviceRayTracingPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRayTracingPropertiesNV*>( this );
    }

    explicit operator VkPhysicalDeviceRayTracingPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint64_t const &, uint64_t const &, uint64_t const &, uint32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderGroupHandleSize, maxRecursionDepth, maxShaderGroupStride, shaderGroupBaseAlignment, maxGeometryCount, maxInstanceCount, maxTriangleCount, maxDescriptorSetAccelerationStructures );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceRayTracingPropertiesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceRayTracingPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize )
          && ( maxRecursionDepth == rhs.maxRecursionDepth )
          && ( maxShaderGroupStride == rhs.maxShaderGroupStride )
          && ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment )
          && ( maxGeometryCount == rhs.maxGeometryCount )
          && ( maxInstanceCount == rhs.maxInstanceCount )
          && ( maxTriangleCount == rhs.maxTriangleCount )
          && ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures );
#endif
    }

    bool operator!=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesNV;
    void * pNext = {};
    uint32_t shaderGroupHandleSize = {};
    uint32_t maxRecursionDepth = {};
    uint32_t maxShaderGroupStride = {};
    uint32_t shaderGroupBaseAlignment = {};
    uint64_t maxGeometryCount = {};
    uint64_t maxInstanceCount = {};
    uint64_t maxTriangleCount = {};
    uint32_t maxDescriptorSetAccelerationStructures = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV ) == sizeof( VkPhysicalDeviceRayTracingPropertiesNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV>::value, "PhysicalDeviceRayTracingPropertiesNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPropertiesNV>
  {
    using Type = PhysicalDeviceRayTracingPropertiesNV;
  };

  struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV
  {
    using NativeType = VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = {}) VULKAN_HPP_NOEXCEPT
    : representativeFragmentTest( representativeFragmentTest_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRepresentativeFragmentTestFeaturesNV( *reinterpret_cast<PhysicalDeviceRepresentativeFragmentTestFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setRepresentativeFragmentTest( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ ) VULKAN_HPP_NOEXCEPT
    {
      representativeFragmentTest = representativeFragmentTest_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV*>( this );
    }

    explicit operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, representativeFragmentTest );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( representativeFragmentTest == rhs.representativeFragmentTest );
#endif
    }

    bool operator!=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) == sizeof( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV>::value, "PhysicalDeviceRepresentativeFragmentTestFeaturesNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV>
  {
    using Type = PhysicalDeviceRepresentativeFragmentTestFeaturesNV;
  };

  struct PhysicalDeviceRobustness2FeaturesEXT
  {
    using NativeType = VkPhysicalDeviceRobustness2FeaturesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ = {}) VULKAN_HPP_NOEXCEPT
    : robustBufferAccess2( robustBufferAccess2_ ), robustImageAccess2( robustImageAccess2_ ), nullDescriptor( nullDescriptor_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRobustness2FeaturesEXT( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRobustness2FeaturesEXT( *reinterpret_cast<PhysicalDeviceRobustness2FeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceRobustness2FeaturesEXT & operator=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRobustness2FeaturesEXT & operator=( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & setRobustBufferAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ ) VULKAN_HPP_NOEXCEPT
    {
      robustBufferAccess2 = robustBufferAccess2_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & setRobustImageAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ ) VULKAN_HPP_NOEXCEPT
    {
      robustImageAccess2 = robustImageAccess2_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & setNullDescriptor( VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ ) VULKAN_HPP_NOEXCEPT
    {
      nullDescriptor = nullDescriptor_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceRobustness2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRobustness2FeaturesEXT*>( this );
    }

    explicit operator VkPhysicalDeviceRobustness2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRobustness2FeaturesEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, robustBufferAccess2, robustImageAccess2, nullDescriptor );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceRobustness2FeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( robustBufferAccess2 == rhs.robustBufferAccess2 )
          && ( robustImageAccess2 == rhs.robustImageAccess2 )
          && ( nullDescriptor == rhs.nullDescriptor );
#endif
    }

    bool operator!=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2 = {};
    VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2 = {};
    VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT ) == sizeof( VkPhysicalDeviceRobustness2FeaturesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT>::value, "PhysicalDeviceRobustness2FeaturesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRobustness2FeaturesEXT>
  {
    using Type = PhysicalDeviceRobustness2FeaturesEXT;
  };

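  // Illustrative usage sketch (not part of the generated API): the fluent setters generated above
  // allow the struct to be built in one expression before it is chained into DeviceCreateInfo::pNext
  // next to the other enabled feature structs. This is a sketch only; which members may be enabled
  // still has to be confirmed against the queried feature support.
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT robustness2 =
  //     VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT{}
  //       .setRobustBufferAccess2( VK_TRUE )
  //       .setNullDescriptor( VK_TRUE );
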
  struct PhysicalDeviceRobustness2PropertiesEXT
  {
    using NativeType = VkPhysicalDeviceRobustness2PropertiesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT(VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment_ = {}) VULKAN_HPP_NOEXCEPT
    : robustStorageBufferAccessSizeAlignment( robustStorageBufferAccessSizeAlignment_ ), robustUniformBufferAccessSizeAlignment( robustUniformBufferAccessSizeAlignment_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRobustness2PropertiesEXT( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRobustness2PropertiesEXT( *reinterpret_cast<PhysicalDeviceRobustness2PropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceRobustness2PropertiesEXT & operator=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRobustness2PropertiesEXT & operator=( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT const *>( &rhs );
      return *this;
    }

    explicit operator VkPhysicalDeviceRobustness2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRobustness2PropertiesEXT*>( this );
    }

    explicit operator VkPhysicalDeviceRobustness2PropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRobustness2PropertiesEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, robustStorageBufferAccessSizeAlignment, robustUniformBufferAccessSizeAlignment );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceRobustness2PropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( robustStorageBufferAccessSizeAlignment == rhs.robustStorageBufferAccessSizeAlignment )
          && ( robustUniformBufferAccessSizeAlignment == rhs.robustUniformBufferAccessSizeAlignment );
#endif
    }

    bool operator!=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment = {};
    VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT ) == sizeof( VkPhysicalDeviceRobustness2PropertiesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT>::value, "PhysicalDeviceRobustness2PropertiesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRobustness2PropertiesEXT>
  {
    using Type = PhysicalDeviceRobustness2PropertiesEXT;
  };

  struct PhysicalDeviceSampleLocationsPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceSampleLocationsPropertiesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT(VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, std::array<float,2> const & sampleLocationCoordinateRange_ = {}, uint32_t sampleLocationSubPixelBits_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = {}) VULKAN_HPP_NOEXCEPT
    : sampleLocationSampleCounts( sampleLocationSampleCounts_ ), maxSampleLocationGridSize( maxSampleLocationGridSize_ ), sampleLocationCoordinateRange( sampleLocationCoordinateRange_ ), sampleLocationSubPixelBits( sampleLocationSubPixelBits_ ), variableSampleLocations( variableSampleLocations_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSampleLocationsPropertiesEXT( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSampleLocationsPropertiesEXT( *reinterpret_cast<PhysicalDeviceSampleLocationsPropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceSampleLocationsPropertiesEXT & operator=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSampleLocationsPropertiesEXT & operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT const *>( &rhs );
      return *this;
    }

    explicit operator VkPhysicalDeviceSampleLocationsPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSampleLocationsPropertiesEXT*>( this );
    }

    explicit operator VkPhysicalDeviceSampleLocationsPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSampleLocationsPropertiesEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, sampleLocationSampleCounts, maxSampleLocationGridSize, sampleLocationCoordinateRange, sampleLocationSubPixelBits, variableSampleLocations );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceSampleLocationsPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts )
          && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize )
          && ( sampleLocationCoordinateRange == rhs.sampleLocationCoordinateRange )
          && ( sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits )
          && ( variableSampleLocations == rhs.variableSampleLocations );
#endif
    }

    bool operator!=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts = {};
    VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> sampleLocationCoordinateRange = {};
    uint32_t sampleLocationSubPixelBits = {};
    VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT ) == sizeof( VkPhysicalDeviceSampleLocationsPropertiesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT>::value, "PhysicalDeviceSampleLocationsPropertiesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT>
  {
    using Type = PhysicalDeviceSampleLocationsPropertiesEXT;
  };

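  // Illustrative usage sketch (not part of the generated API): the sampleLocationCoordinateRange
  // member is an ArrayWrapper1D<float, 2>, which can be indexed like a std::array. `physicalDevice`
  // is an assumed vk::PhysicalDevice and the templated getProperties2 overload is the StructureChain
  // variant provided by vulkan.hpp.
  //
  //   auto  slProps  = physicalDevice.getProperties2<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2,
  //                                                  VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT>()
  //                      .get<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT>();
  //   float minCoord = slProps.sampleLocationCoordinateRange[0];
  //   float maxCoord = slProps.sampleLocationCoordinateRange[1];
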
  struct PhysicalDeviceSamplerFilterMinmaxProperties
  {
    using NativeType = VkPhysicalDeviceSamplerFilterMinmaxProperties;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties(VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}) VULKAN_HPP_NOEXCEPT
    : filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ ), filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSamplerFilterMinmaxProperties( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSamplerFilterMinmaxProperties( *reinterpret_cast<PhysicalDeviceSamplerFilterMinmaxProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceSamplerFilterMinmaxProperties & operator=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSamplerFilterMinmaxProperties & operator=( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties const *>( &rhs );
      return *this;
    }

    explicit operator VkPhysicalDeviceSamplerFilterMinmaxProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSamplerFilterMinmaxProperties*>( this );
    }

    explicit operator VkPhysicalDeviceSamplerFilterMinmaxProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxProperties*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, filterMinmaxSingleComponentFormats, filterMinmaxImageComponentMapping );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceSamplerFilterMinmaxProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats )
          && ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping );
#endif
    }

    bool operator!=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {};
    VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties ) == sizeof( VkPhysicalDeviceSamplerFilterMinmaxProperties ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties>::value, "PhysicalDeviceSamplerFilterMinmaxProperties is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties>
  {
    using Type = PhysicalDeviceSamplerFilterMinmaxProperties;
  };
  using PhysicalDeviceSamplerFilterMinmaxPropertiesEXT = PhysicalDeviceSamplerFilterMinmaxProperties;

struct PhysicalDeviceSamplerYcbcrConversionFeatures
|
|
{
|
|
using NativeType = VkPhysicalDeviceSamplerYcbcrConversionFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures(VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}) VULKAN_HPP_NOEXCEPT
|
|
      : samplerYcbcrConversion( samplerYcbcrConversion_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSamplerYcbcrConversionFeatures( *reinterpret_cast<PhysicalDeviceSamplerYcbcrConversionFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerYcbcrConversionFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerYcbcrConversionFeatures & setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
    {
      samplerYcbcrConversion = samplerYcbcrConversion_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSamplerYcbcrConversionFeatures*>( this );
    }

    explicit operator VkPhysicalDeviceSamplerYcbcrConversionFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSamplerYcbcrConversionFeatures*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, samplerYcbcrConversion );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceSamplerYcbcrConversionFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion );
#endif
    }

    bool operator!=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {};

  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures ) == sizeof( VkPhysicalDeviceSamplerYcbcrConversionFeatures ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures>::value, "PhysicalDeviceSamplerYcbcrConversionFeatures is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures>
  {
    using Type = PhysicalDeviceSamplerYcbcrConversionFeatures;
  };
  using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures;
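
  // Usage sketch (editorial illustration, not generated from the registry): feature structs
  // like the one above are typically chained behind PhysicalDeviceFeatures2 to query support.
  // Assumes a valid VULKAN_HPP_NAMESPACE::PhysicalDevice `physicalDevice` and a dispatcher
  // with vkGetPhysicalDeviceFeatures2 available.
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures ycbcrFeatures;
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2                      features2;
  //   features2.pNext = &ycbcrFeatures;            // chain the feature query struct
  //   physicalDevice.getFeatures2( &features2 );   // fills features2 and ycbcrFeatures
  //   bool ycbcrSupported = ( ycbcrFeatures.samplerYcbcrConversion == VK_TRUE );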

  struct PhysicalDeviceScalarBlockLayoutFeatures
  {
    using NativeType = VkPhysicalDeviceScalarBlockLayoutFeatures;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures(VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}) VULKAN_HPP_NOEXCEPT
      : scalarBlockLayout( scalarBlockLayout_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceScalarBlockLayoutFeatures( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceScalarBlockLayoutFeatures( *reinterpret_cast<PhysicalDeviceScalarBlockLayoutFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceScalarBlockLayoutFeatures & operator=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceScalarBlockLayoutFeatures & operator=( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceScalarBlockLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceScalarBlockLayoutFeatures & setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      scalarBlockLayout = scalarBlockLayout_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceScalarBlockLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceScalarBlockLayoutFeatures*>( this );
    }

    explicit operator VkPhysicalDeviceScalarBlockLayoutFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceScalarBlockLayoutFeatures*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, scalarBlockLayout );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceScalarBlockLayoutFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( scalarBlockLayout == rhs.scalarBlockLayout );
#endif
    }

    bool operator!=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {};

  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures ) == sizeof( VkPhysicalDeviceScalarBlockLayoutFeatures ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures>::value, "PhysicalDeviceScalarBlockLayoutFeatures is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceScalarBlockLayoutFeatures>
  {
    using Type = PhysicalDeviceScalarBlockLayoutFeatures;
  };
  using PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures;
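
  // Usage sketch (editorial illustration): once support has been confirmed, a feature is
  // enabled by chaining the struct into DeviceCreateInfo::pNext before device creation.
  // Assumes a hypothetical, already filled VULKAN_HPP_NAMESPACE::DeviceCreateInfo `deviceCreateInfo`.
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures scalarLayoutFeatures;
  //   scalarLayoutFeatures.scalarBlockLayout = VK_TRUE;
  //   deviceCreateInfo.pNext = &scalarLayoutFeatures;   // request the feature on the new device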

  struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures
  {
    using NativeType = VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures(VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}) VULKAN_HPP_NOEXCEPT
      : separateDepthStencilLayouts( separateDepthStencilLayouts_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSeparateDepthStencilLayoutsFeatures( *reinterpret_cast<PhysicalDeviceSeparateDepthStencilLayoutsFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceSeparateDepthStencilLayoutsFeatures & operator=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSeparateDepthStencilLayoutsFeatures & operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      separateDepthStencilLayouts = separateDepthStencilLayouts_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures*>( this );
    }

    explicit operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, separateDepthStencilLayouts );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts );
#endif
    }

    bool operator!=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {};

  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) == sizeof( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures>::value, "PhysicalDeviceSeparateDepthStencilLayoutsFeatures is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures>
  {
    using Type = PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
  };
  using PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
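
  // Usage sketch (editorial illustration): with enhanced mode enabled (the default), a
  // StructureChain query avoids wiring pNext by hand. Assumes a valid
  // VULKAN_HPP_NAMESPACE::PhysicalDevice `physicalDevice`.
  //
  //   auto chain = physicalDevice.getFeatures2<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2,
  //                                            VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures>();
  //   bool supported =
  //     chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures>().separateDepthStencilLayouts == VK_TRUE;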
|
|
|
|
struct PhysicalDeviceShaderAtomicFloat2FeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat2FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: shaderBufferFloat16Atomics( shaderBufferFloat16Atomics_ ), shaderBufferFloat16AtomicAdd( shaderBufferFloat16AtomicAdd_ ), shaderBufferFloat16AtomicMinMax( shaderBufferFloat16AtomicMinMax_ ), shaderBufferFloat32AtomicMinMax( shaderBufferFloat32AtomicMinMax_ ), shaderBufferFloat64AtomicMinMax( shaderBufferFloat64AtomicMinMax_ ), shaderSharedFloat16Atomics( shaderSharedFloat16Atomics_ ), shaderSharedFloat16AtomicAdd( shaderSharedFloat16AtomicAdd_ ), shaderSharedFloat16AtomicMinMax( shaderSharedFloat16AtomicMinMax_ ), shaderSharedFloat32AtomicMinMax( shaderSharedFloat32AtomicMinMax_ ), shaderSharedFloat64AtomicMinMax( shaderSharedFloat64AtomicMinMax_ ), shaderImageFloat32AtomicMinMax( shaderImageFloat32AtomicMinMax_ ), sparseImageFloat32AtomicMinMax( sparseImageFloat32AtomicMinMax_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat2FeaturesEXT( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceShaderAtomicFloat2FeaturesEXT( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceShaderAtomicFloat2FeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderAtomicFloat2FeaturesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceShaderAtomicFloat2FeaturesEXT & operator=( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceShaderAtomicFloat2FeaturesEXT & operator=( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderBufferFloat16Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderBufferFloat16Atomics = shaderBufferFloat16Atomics_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderBufferFloat16AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderBufferFloat16AtomicAdd = shaderBufferFloat16AtomicAdd_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderBufferFloat16AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderBufferFloat16AtomicMinMax = shaderBufferFloat16AtomicMinMax_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderBufferFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderBufferFloat32AtomicMinMax = shaderBufferFloat32AtomicMinMax_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderBufferFloat64AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderBufferFloat64AtomicMinMax = shaderBufferFloat64AtomicMinMax_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderSharedFloat16Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderSharedFloat16Atomics = shaderSharedFloat16Atomics_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderSharedFloat16AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderSharedFloat16AtomicAdd = shaderSharedFloat16AtomicAdd_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderSharedFloat16AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderSharedFloat16AtomicMinMax = shaderSharedFloat16AtomicMinMax_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderSharedFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderSharedFloat32AtomicMinMax = shaderSharedFloat32AtomicMinMax_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderSharedFloat64AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderSharedFloat64AtomicMinMax = shaderSharedFloat64AtomicMinMax_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderImageFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderImageFloat32AtomicMinMax = shaderImageFloat32AtomicMinMax_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setSparseImageFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sparseImageFloat32AtomicMinMax = sparseImageFloat32AtomicMinMax_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, shaderBufferFloat16Atomics, shaderBufferFloat16AtomicAdd, shaderBufferFloat16AtomicMinMax, shaderBufferFloat32AtomicMinMax, shaderBufferFloat64AtomicMinMax, shaderSharedFloat16Atomics, shaderSharedFloat16AtomicAdd, shaderSharedFloat16AtomicMinMax, shaderSharedFloat32AtomicMinMax, shaderSharedFloat64AtomicMinMax, shaderImageFloat32AtomicMinMax, sparseImageFloat32AtomicMinMax );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( shaderBufferFloat16Atomics == rhs.shaderBufferFloat16Atomics )
|
|
&& ( shaderBufferFloat16AtomicAdd == rhs.shaderBufferFloat16AtomicAdd )
|
|
&& ( shaderBufferFloat16AtomicMinMax == rhs.shaderBufferFloat16AtomicMinMax )
|
|
&& ( shaderBufferFloat32AtomicMinMax == rhs.shaderBufferFloat32AtomicMinMax )
|
|
&& ( shaderBufferFloat64AtomicMinMax == rhs.shaderBufferFloat64AtomicMinMax )
|
|
&& ( shaderSharedFloat16Atomics == rhs.shaderSharedFloat16Atomics )
|
|
&& ( shaderSharedFloat16AtomicAdd == rhs.shaderSharedFloat16AtomicAdd )
|
|
&& ( shaderSharedFloat16AtomicMinMax == rhs.shaderSharedFloat16AtomicMinMax )
|
|
&& ( shaderSharedFloat32AtomicMinMax == rhs.shaderSharedFloat32AtomicMinMax )
|
|
&& ( shaderSharedFloat64AtomicMinMax == rhs.shaderSharedFloat64AtomicMinMax )
|
|
&& ( shaderImageFloat32AtomicMinMax == rhs.shaderImageFloat32AtomicMinMax )
|
|
&& ( sparseImageFloat32AtomicMinMax == rhs.sparseImageFloat32AtomicMinMax );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT ) == sizeof( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT>::value, "PhysicalDeviceShaderAtomicFloat2FeaturesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT>
|
|
{
|
|
using Type = PhysicalDeviceShaderAtomicFloat2FeaturesEXT;
|
|
};
|
|
|
|
struct PhysicalDeviceShaderAtomicFloatFeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceShaderAtomicFloatFeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: shaderBufferFloat32Atomics( shaderBufferFloat32Atomics_ ), shaderBufferFloat32AtomicAdd( shaderBufferFloat32AtomicAdd_ ), shaderBufferFloat64Atomics( shaderBufferFloat64Atomics_ ), shaderBufferFloat64AtomicAdd( shaderBufferFloat64AtomicAdd_ ), shaderSharedFloat32Atomics( shaderSharedFloat32Atomics_ ), shaderSharedFloat32AtomicAdd( shaderSharedFloat32AtomicAdd_ ), shaderSharedFloat64Atomics( shaderSharedFloat64Atomics_ ), shaderSharedFloat64AtomicAdd( shaderSharedFloat64AtomicAdd_ ), shaderImageFloat32Atomics( shaderImageFloat32Atomics_ ), shaderImageFloat32AtomicAdd( shaderImageFloat32AtomicAdd_ ), sparseImageFloat32Atomics( sparseImageFloat32Atomics_ ), sparseImageFloat32AtomicAdd( sparseImageFloat32AtomicAdd_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceShaderAtomicFloatFeaturesEXT( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceShaderAtomicFloatFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderAtomicFloatFeaturesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderBufferFloat32Atomics = shaderBufferFloat32Atomics_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderBufferFloat32AtomicAdd = shaderBufferFloat32AtomicAdd_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderBufferFloat64Atomics = shaderBufferFloat64Atomics_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderBufferFloat64AtomicAdd = shaderBufferFloat64AtomicAdd_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderSharedFloat32Atomics = shaderSharedFloat32Atomics_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderSharedFloat32AtomicAdd = shaderSharedFloat32AtomicAdd_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderSharedFloat64Atomics = shaderSharedFloat64Atomics_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderSharedFloat64AtomicAdd = shaderSharedFloat64AtomicAdd_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderImageFloat32Atomics = shaderImageFloat32Atomics_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderImageFloat32AtomicAdd = shaderImageFloat32AtomicAdd_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setSparseImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sparseImageFloat32Atomics = sparseImageFloat32Atomics_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setSparseImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sparseImageFloat32AtomicAdd = sparseImageFloat32AtomicAdd_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicFloatFeaturesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceShaderAtomicFloatFeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, shaderBufferFloat32Atomics, shaderBufferFloat32AtomicAdd, shaderBufferFloat64Atomics, shaderBufferFloat64AtomicAdd, shaderSharedFloat32Atomics, shaderSharedFloat32AtomicAdd, shaderSharedFloat64Atomics, shaderSharedFloat64AtomicAdd, shaderImageFloat32Atomics, shaderImageFloat32AtomicAdd, sparseImageFloat32Atomics, sparseImageFloat32AtomicAdd );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( shaderBufferFloat32Atomics == rhs.shaderBufferFloat32Atomics )
|
|
&& ( shaderBufferFloat32AtomicAdd == rhs.shaderBufferFloat32AtomicAdd )
|
|
&& ( shaderBufferFloat64Atomics == rhs.shaderBufferFloat64Atomics )
|
|
&& ( shaderBufferFloat64AtomicAdd == rhs.shaderBufferFloat64AtomicAdd )
|
|
&& ( shaderSharedFloat32Atomics == rhs.shaderSharedFloat32Atomics )
|
|
&& ( shaderSharedFloat32AtomicAdd == rhs.shaderSharedFloat32AtomicAdd )
|
|
&& ( shaderSharedFloat64Atomics == rhs.shaderSharedFloat64Atomics )
|
|
&& ( shaderSharedFloat64AtomicAdd == rhs.shaderSharedFloat64AtomicAdd )
|
|
&& ( shaderImageFloat32Atomics == rhs.shaderImageFloat32Atomics )
|
|
&& ( shaderImageFloat32AtomicAdd == rhs.shaderImageFloat32AtomicAdd )
|
|
&& ( sparseImageFloat32Atomics == rhs.sparseImageFloat32Atomics )
|
|
&& ( sparseImageFloat32AtomicAdd == rhs.sparseImageFloat32AtomicAdd );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT>::value, "PhysicalDeviceShaderAtomicFloatFeaturesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT>
|
|
{
|
|
using Type = PhysicalDeviceShaderAtomicFloatFeaturesEXT;
|
|
};

  struct PhysicalDeviceShaderAtomicInt64Features
  {
    using NativeType = VkPhysicalDeviceShaderAtomicInt64Features;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicInt64Features;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}) VULKAN_HPP_NOEXCEPT
      : shaderBufferInt64Atomics( shaderBufferInt64Atomics_ ), shaderSharedInt64Atomics( shaderSharedInt64Atomics_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderAtomicInt64Features( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderAtomicInt64Features( *reinterpret_cast<PhysicalDeviceShaderAtomicInt64Features const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShaderAtomicInt64Features & operator=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderAtomicInt64Features & operator=( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceShaderAtomicInt64Features const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicInt64Features*>( this );
    }

    explicit operator VkPhysicalDeviceShaderAtomicInt64Features &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderAtomicInt64Features*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderBufferInt64Atomics, shaderSharedInt64Atomics );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceShaderAtomicInt64Features const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics )
          && ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics );
#endif
    }

    bool operator!=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64Features;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {};

  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features ) == sizeof( VkPhysicalDeviceShaderAtomicInt64Features ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features>::value, "PhysicalDeviceShaderAtomicInt64Features is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicInt64Features>
  {
    using Type = PhysicalDeviceShaderAtomicInt64Features;
  };
  using PhysicalDeviceShaderAtomicInt64FeaturesKHR = PhysicalDeviceShaderAtomicInt64Features;
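
  // Usage sketch (editorial illustration): when setters are not disabled via
  // VULKAN_HPP_NO_STRUCT_SETTERS, the fluent setters above can be chained.
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features atomicInt64Features;
  //   atomicInt64Features.setShaderBufferInt64Atomics( VK_TRUE ).setShaderSharedInt64Atomics( VK_TRUE );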

  struct PhysicalDeviceShaderClockFeaturesKHR
  {
    using NativeType = VkPhysicalDeviceShaderClockFeaturesKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ = {}) VULKAN_HPP_NOEXCEPT
      : shaderSubgroupClock( shaderSubgroupClock_ ), shaderDeviceClock( shaderDeviceClock_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderClockFeaturesKHR( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderClockFeaturesKHR( *reinterpret_cast<PhysicalDeviceShaderClockFeaturesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShaderClockFeaturesKHR & operator=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderClockFeaturesKHR & operator=( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & setShaderSubgroupClock( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSubgroupClock = shaderSubgroupClock_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & setShaderDeviceClock( VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderDeviceClock = shaderDeviceClock_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceShaderClockFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderClockFeaturesKHR*>( this );
    }

    explicit operator VkPhysicalDeviceShaderClockFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderClockFeaturesKHR*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderSubgroupClock, shaderDeviceClock );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceShaderClockFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( shaderSubgroupClock == rhs.shaderSubgroupClock )
          && ( shaderDeviceClock == rhs.shaderDeviceClock );
#endif
    }

    bool operator!=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock = {};

  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderClockFeaturesKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR>::value, "PhysicalDeviceShaderClockFeaturesKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderClockFeaturesKHR>
  {
    using Type = PhysicalDeviceShaderClockFeaturesKHR;
  };
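
  // Usage sketch (editorial illustration): both clock features come from VK_KHR_shader_clock,
  // so the extension has to be enabled alongside the chained feature struct. Assumes a
  // hypothetical VULKAN_HPP_NAMESPACE::DeviceCreateInfo `deviceCreateInfo`.
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR clockFeatures;
  //   clockFeatures.setShaderSubgroupClock( VK_TRUE ).setShaderDeviceClock( VK_TRUE );
  //   deviceCreateInfo.pNext = &clockFeatures;   // in addition to enabling VK_KHR_shader_clock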

  struct PhysicalDeviceShaderCoreProperties2AMD
  {
    using NativeType = VkPhysicalDeviceShaderCoreProperties2AMD;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD(VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_ = {}, uint32_t activeComputeUnitCount_ = {}) VULKAN_HPP_NOEXCEPT
      : shaderCoreFeatures( shaderCoreFeatures_ ), activeComputeUnitCount( activeComputeUnitCount_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderCoreProperties2AMD( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderCoreProperties2AMD( *reinterpret_cast<PhysicalDeviceShaderCoreProperties2AMD const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShaderCoreProperties2AMD & operator=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderCoreProperties2AMD & operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD const *>( &rhs );
      return *this;
    }

    explicit operator VkPhysicalDeviceShaderCoreProperties2AMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderCoreProperties2AMD*>( this );
    }

    explicit operator VkPhysicalDeviceShaderCoreProperties2AMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderCoreProperties2AMD*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD const &, uint32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderCoreFeatures, activeComputeUnitCount );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceShaderCoreProperties2AMD const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( shaderCoreFeatures == rhs.shaderCoreFeatures )
          && ( activeComputeUnitCount == rhs.activeComputeUnitCount );
#endif
    }

    bool operator!=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures = {};
    uint32_t activeComputeUnitCount = {};

  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD ) == sizeof( VkPhysicalDeviceShaderCoreProperties2AMD ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD>::value, "PhysicalDeviceShaderCoreProperties2AMD is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCoreProperties2AMD>
  {
    using Type = PhysicalDeviceShaderCoreProperties2AMD;
  };

  struct PhysicalDeviceShaderCorePropertiesAMD
  {
    using NativeType = VkPhysicalDeviceShaderCorePropertiesAMD;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD(uint32_t shaderEngineCount_ = {}, uint32_t shaderArraysPerEngineCount_ = {}, uint32_t computeUnitsPerShaderArray_ = {}, uint32_t simdPerComputeUnit_ = {}, uint32_t wavefrontsPerSimd_ = {}, uint32_t wavefrontSize_ = {}, uint32_t sgprsPerSimd_ = {}, uint32_t minSgprAllocation_ = {}, uint32_t maxSgprAllocation_ = {}, uint32_t sgprAllocationGranularity_ = {}, uint32_t vgprsPerSimd_ = {}, uint32_t minVgprAllocation_ = {}, uint32_t maxVgprAllocation_ = {}, uint32_t vgprAllocationGranularity_ = {}) VULKAN_HPP_NOEXCEPT
      : shaderEngineCount( shaderEngineCount_ ), shaderArraysPerEngineCount( shaderArraysPerEngineCount_ ), computeUnitsPerShaderArray( computeUnitsPerShaderArray_ ), simdPerComputeUnit( simdPerComputeUnit_ ), wavefrontsPerSimd( wavefrontsPerSimd_ ), wavefrontSize( wavefrontSize_ ), sgprsPerSimd( sgprsPerSimd_ ), minSgprAllocation( minSgprAllocation_ ), maxSgprAllocation( maxSgprAllocation_ ), sgprAllocationGranularity( sgprAllocationGranularity_ ), vgprsPerSimd( vgprsPerSimd_ ), minVgprAllocation( minVgprAllocation_ ), maxVgprAllocation( maxVgprAllocation_ ), vgprAllocationGranularity( vgprAllocationGranularity_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderCorePropertiesAMD( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderCorePropertiesAMD( *reinterpret_cast<PhysicalDeviceShaderCorePropertiesAMD const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShaderCorePropertiesAMD & operator=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderCorePropertiesAMD & operator=( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD const *>( &rhs );
      return *this;
    }

    explicit operator VkPhysicalDeviceShaderCorePropertiesAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderCorePropertiesAMD*>( this );
    }

    explicit operator VkPhysicalDeviceShaderCorePropertiesAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderCorePropertiesAMD*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderEngineCount, shaderArraysPerEngineCount, computeUnitsPerShaderArray, simdPerComputeUnit, wavefrontsPerSimd, wavefrontSize, sgprsPerSimd, minSgprAllocation, maxSgprAllocation, sgprAllocationGranularity, vgprsPerSimd, minVgprAllocation, maxVgprAllocation, vgprAllocationGranularity );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceShaderCorePropertiesAMD const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( shaderEngineCount == rhs.shaderEngineCount )
          && ( shaderArraysPerEngineCount == rhs.shaderArraysPerEngineCount )
          && ( computeUnitsPerShaderArray == rhs.computeUnitsPerShaderArray )
          && ( simdPerComputeUnit == rhs.simdPerComputeUnit )
          && ( wavefrontsPerSimd == rhs.wavefrontsPerSimd )
          && ( wavefrontSize == rhs.wavefrontSize )
          && ( sgprsPerSimd == rhs.sgprsPerSimd )
          && ( minSgprAllocation == rhs.minSgprAllocation )
          && ( maxSgprAllocation == rhs.maxSgprAllocation )
          && ( sgprAllocationGranularity == rhs.sgprAllocationGranularity )
          && ( vgprsPerSimd == rhs.vgprsPerSimd )
          && ( minVgprAllocation == rhs.minVgprAllocation )
          && ( maxVgprAllocation == rhs.maxVgprAllocation )
          && ( vgprAllocationGranularity == rhs.vgprAllocationGranularity );
#endif
    }

    bool operator!=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD;
    void * pNext = {};
    uint32_t shaderEngineCount = {};
    uint32_t shaderArraysPerEngineCount = {};
    uint32_t computeUnitsPerShaderArray = {};
    uint32_t simdPerComputeUnit = {};
    uint32_t wavefrontsPerSimd = {};
    uint32_t wavefrontSize = {};
    uint32_t sgprsPerSimd = {};
    uint32_t minSgprAllocation = {};
    uint32_t maxSgprAllocation = {};
    uint32_t sgprAllocationGranularity = {};
    uint32_t vgprsPerSimd = {};
    uint32_t minVgprAllocation = {};
    uint32_t maxVgprAllocation = {};
    uint32_t vgprAllocationGranularity = {};

  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD ) == sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD>::value, "PhysicalDeviceShaderCorePropertiesAMD is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCorePropertiesAMD>
  {
    using Type = PhysicalDeviceShaderCorePropertiesAMD;
  };
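
  // Usage sketch (editorial illustration): properties structs like the one above are read back
  // through PhysicalDeviceProperties2 rather than passed to device creation. Assumes a valid
  // VULKAN_HPP_NAMESPACE::PhysicalDevice `physicalDevice`.
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD coreProperties;
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2             properties2;
  //   properties2.pNext = &coreProperties;
  //   physicalDevice.getProperties2( &properties2 );
  //   uint32_t waveSize = coreProperties.wavefrontSize;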

  struct PhysicalDeviceShaderDemoteToHelperInvocationFeatures
  {
    using NativeType = VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {}) VULKAN_HPP_NOEXCEPT
      : shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeatures( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderDemoteToHelperInvocationFeatures( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderDemoteToHelperInvocationFeatures( *reinterpret_cast<PhysicalDeviceShaderDemoteToHelperInvocationFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShaderDemoteToHelperInvocationFeatures & operator=( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderDemoteToHelperInvocationFeatures & operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeatures & setShaderDemoteToHelperInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures*>( this );
    }

    explicit operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderDemoteToHelperInvocation );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation );
#endif
    }

    bool operator!=( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {};

  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures ) == sizeof( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures>::value, "PhysicalDeviceShaderDemoteToHelperInvocationFeatures is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures>
  {
    using Type = PhysicalDeviceShaderDemoteToHelperInvocationFeatures;
  };
  using PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = PhysicalDeviceShaderDemoteToHelperInvocationFeatures;
|
|
|
|
  struct PhysicalDeviceShaderDrawParametersFeatures
  {
    using NativeType = VkPhysicalDeviceShaderDrawParametersFeatures;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {}) VULKAN_HPP_NOEXCEPT
    : shaderDrawParameters( shaderDrawParameters_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderDrawParametersFeatures( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderDrawParametersFeatures( *reinterpret_cast<PhysicalDeviceShaderDrawParametersFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShaderDrawParametersFeatures & operator=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderDrawParametersFeatures & operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDrawParametersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDrawParametersFeatures & setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderDrawParameters = shaderDrawParameters_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceShaderDrawParametersFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderDrawParametersFeatures*>( this );
    }

    explicit operator VkPhysicalDeviceShaderDrawParametersFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderDrawParametersFeatures*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderDrawParameters );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceShaderDrawParametersFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( shaderDrawParameters == rhs.shaderDrawParameters );
#endif
    }

    bool operator!=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures ) == sizeof( VkPhysicalDeviceShaderDrawParametersFeatures ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures>::value, "PhysicalDeviceShaderDrawParametersFeatures is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderDrawParametersFeatures>
  {
    using Type = PhysicalDeviceShaderDrawParametersFeatures;
  };
  using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures;
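
  // Illustrative usage sketch (not part of the Vulkan XML registry output): feature structs such
  // as PhysicalDeviceShaderDrawParametersFeatures are normally queried by chaining them into
  // PhysicalDeviceFeatures2::pNext before calling PhysicalDevice::getFeatures2(). The helper name
  // below is hypothetical; only the pNext-chaining pattern itself is being shown.
  inline bool exampleQueryShaderDrawParameters( VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevice )
  {
    VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures drawParametersFeatures;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2                    features2;
    features2.pNext = &drawParametersFeatures;  // chain the feature struct; sType is already set by the wrapper
    physicalDevice.getFeatures2( &features2 );  // the implementation fills in both structs
    return drawParametersFeatures.shaderDrawParameters == VK_TRUE;
  }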

  struct PhysicalDeviceShaderFloat16Int8Features
  {
    using NativeType = VkPhysicalDeviceShaderFloat16Int8Features;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderFloat16Int8Features;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features(VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}) VULKAN_HPP_NOEXCEPT
    : shaderFloat16( shaderFloat16_ ), shaderInt8( shaderInt8_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderFloat16Int8Features( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderFloat16Int8Features( *reinterpret_cast<PhysicalDeviceShaderFloat16Int8Features const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShaderFloat16Int8Features & operator=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderFloat16Int8Features & operator=( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderFloat16 = shaderFloat16_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderInt8 = shaderInt8_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceShaderFloat16Int8Features const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderFloat16Int8Features*>( this );
    }

    explicit operator VkPhysicalDeviceShaderFloat16Int8Features &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderFloat16Int8Features*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderFloat16, shaderInt8 );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceShaderFloat16Int8Features const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( shaderFloat16 == rhs.shaderFloat16 )
          && ( shaderInt8 == rhs.shaderInt8 );
#endif
    }

    bool operator!=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloat16Int8Features;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features ) == sizeof( VkPhysicalDeviceShaderFloat16Int8Features ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features>::value, "PhysicalDeviceShaderFloat16Int8Features is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderFloat16Int8Features>
  {
    using Type = PhysicalDeviceShaderFloat16Int8Features;
  };
  using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features;
  using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features;
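
  // Illustrative usage sketch (not part of the Vulkan XML registry output): to enable these
  // features at device creation, the struct is linked into the DeviceCreateInfo::pNext chain.
  // The helper below is a hypothetical sketch; the caller keeps ownership of both structs, which
  // must outlive the device-creation call.
  inline void exampleEnableShaderFloat16Int8( VULKAN_HPP_NAMESPACE::DeviceCreateInfo & deviceCreateInfo,
                                              VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features & float16Int8Features )
  {
    float16Int8Features.shaderFloat16 = VK_TRUE;
    float16Int8Features.shaderInt8    = VK_TRUE;
    // preserve anything already chained on the create info, then put this struct at the head
    float16Int8Features.pNext = const_cast<void *>( deviceCreateInfo.pNext );
    deviceCreateInfo.pNext    = &float16Int8Features;
  }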

  struct PhysicalDeviceShaderImageAtomicInt64FeaturesEXT
  {
    using NativeType = VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ = {}) VULKAN_HPP_NOEXCEPT
    : shaderImageInt64Atomics( shaderImageInt64Atomics_ ), sparseImageInt64Atomics( sparseImageInt64Atomics_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setShaderImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderImageInt64Atomics = shaderImageInt64Atomics_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setSparseImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseImageInt64Atomics = sparseImageInt64Atomics_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT*>( this );
    }

    explicit operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderImageInt64Atomics, sparseImageInt64Atomics );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( shaderImageInt64Atomics == rhs.shaderImageInt64Atomics )
          && ( sparseImageInt64Atomics == rhs.sparseImageInt64Atomics );
#endif
    }

    bool operator!=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics = {};
    VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT ) == sizeof( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT>::value, "PhysicalDeviceShaderImageAtomicInt64FeaturesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT>
  {
    using Type = PhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
  };

  struct PhysicalDeviceShaderImageFootprintFeaturesNV
  {
    using NativeType = VkPhysicalDeviceShaderImageFootprintFeaturesNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = {}) VULKAN_HPP_NOEXCEPT
    : imageFootprint( imageFootprint_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderImageFootprintFeaturesNV( *reinterpret_cast<PhysicalDeviceShaderImageFootprintFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageFootprintFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageFootprintFeaturesNV & setImageFootprint( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ ) VULKAN_HPP_NOEXCEPT
    {
      imageFootprint = imageFootprint_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderImageFootprintFeaturesNV*>( this );
    }

    explicit operator VkPhysicalDeviceShaderImageFootprintFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderImageFootprintFeaturesNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, imageFootprint );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceShaderImageFootprintFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( imageFootprint == rhs.imageFootprint );
#endif
    }

    bool operator!=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 imageFootprint = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV ) == sizeof( VkPhysicalDeviceShaderImageFootprintFeaturesNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV>::value, "PhysicalDeviceShaderImageFootprintFeaturesNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV>
  {
    using Type = PhysicalDeviceShaderImageFootprintFeaturesNV;
  };

  struct PhysicalDeviceShaderIntegerDotProductFeatures
  {
    using NativeType = VkPhysicalDeviceShaderIntegerDotProductFeatures;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ = {}) VULKAN_HPP_NOEXCEPT
    : shaderIntegerDotProduct( shaderIntegerDotProduct_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductFeatures( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderIntegerDotProductFeatures( VkPhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderIntegerDotProductFeatures( *reinterpret_cast<PhysicalDeviceShaderIntegerDotProductFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShaderIntegerDotProductFeatures & operator=( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderIntegerDotProductFeatures & operator=( VkPhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductFeatures & setShaderIntegerDotProduct( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderIntegerDotProduct = shaderIntegerDotProduct_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceShaderIntegerDotProductFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerDotProductFeatures*>( this );
    }

    explicit operator VkPhysicalDeviceShaderIntegerDotProductFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderIntegerDotProductFeatures*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderIntegerDotProduct );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceShaderIntegerDotProductFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( shaderIntegerDotProduct == rhs.shaderIntegerDotProduct );
#endif
    }

    bool operator!=( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures ) == sizeof( VkPhysicalDeviceShaderIntegerDotProductFeatures ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures>::value, "PhysicalDeviceShaderIntegerDotProductFeatures is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures>
  {
    using Type = PhysicalDeviceShaderIntegerDotProductFeatures;
  };
  using PhysicalDeviceShaderIntegerDotProductFeaturesKHR = PhysicalDeviceShaderIntegerDotProductFeatures;
|
|
|
|
struct PhysicalDeviceShaderIntegerDotProductProperties
|
|
{
|
|
using NativeType = VkPhysicalDeviceShaderIntegerDotProductProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerDotProductProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductProperties(VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: integerDotProduct8BitUnsignedAccelerated( integerDotProduct8BitUnsignedAccelerated_ ), integerDotProduct8BitSignedAccelerated( integerDotProduct8BitSignedAccelerated_ ), integerDotProduct8BitMixedSignednessAccelerated( integerDotProduct8BitMixedSignednessAccelerated_ ), integerDotProduct4x8BitPackedUnsignedAccelerated( integerDotProduct4x8BitPackedUnsignedAccelerated_ ), integerDotProduct4x8BitPackedSignedAccelerated( integerDotProduct4x8BitPackedSignedAccelerated_ ), integerDotProduct4x8BitPackedMixedSignednessAccelerated( integerDotProduct4x8BitPackedMixedSignednessAccelerated_ ), integerDotProduct16BitUnsignedAccelerated( integerDotProduct16BitUnsignedAccelerated_ ), integerDotProduct16BitSignedAccelerated( integerDotProduct16BitSignedAccelerated_ ), integerDotProduct16BitMixedSignednessAccelerated( integerDotProduct16BitMixedSignednessAccelerated_ ), integerDotProduct32BitUnsignedAccelerated( integerDotProduct32BitUnsignedAccelerated_ ), integerDotProduct32BitSignedAccelerated( integerDotProduct32BitSignedAccelerated_ ), integerDotProduct32BitMixedSignednessAccelerated( integerDotProduct32BitMixedSignednessAccelerated_ ), integerDotProduct64BitUnsignedAccelerated( integerDotProduct64BitUnsignedAccelerated_ ), integerDotProduct64BitSignedAccelerated( integerDotProduct64BitSignedAccelerated_ ), integerDotProduct64BitMixedSignednessAccelerated( integerDotProduct64BitMixedSignednessAccelerated_ ), integerDotProductAccumulatingSaturating8BitUnsignedAccelerated( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ ), integerDotProductAccumulatingSaturating8BitSignedAccelerated( integerDotProductAccumulatingSaturating8BitSignedAccelerated_ ), integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ ), integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ ), integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ ), integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ ), integerDotProductAccumulatingSaturating16BitUnsignedAccelerated( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ ), integerDotProductAccumulatingSaturating16BitSignedAccelerated( integerDotProductAccumulatingSaturating16BitSignedAccelerated_ ), integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ ), integerDotProductAccumulatingSaturating32BitUnsignedAccelerated( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ ), integerDotProductAccumulatingSaturating32BitSignedAccelerated( integerDotProductAccumulatingSaturating32BitSignedAccelerated_ ), integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ ), integerDotProductAccumulatingSaturating64BitUnsignedAccelerated( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ ), integerDotProductAccumulatingSaturating64BitSignedAccelerated( integerDotProductAccumulatingSaturating64BitSignedAccelerated_ ), integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductProperties( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceShaderIntegerDotProductProperties( VkPhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceShaderIntegerDotProductProperties( *reinterpret_cast<PhysicalDeviceShaderIntegerDotProductProperties const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceShaderIntegerDotProductProperties & operator=( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceShaderIntegerDotProductProperties & operator=( VkPhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceShaderIntegerDotProductProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerDotProductProperties*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceShaderIntegerDotProductProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceShaderIntegerDotProductProperties*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, integerDotProduct8BitUnsignedAccelerated, integerDotProduct8BitSignedAccelerated, integerDotProduct8BitMixedSignednessAccelerated, integerDotProduct4x8BitPackedUnsignedAccelerated, integerDotProduct4x8BitPackedSignedAccelerated, integerDotProduct4x8BitPackedMixedSignednessAccelerated, integerDotProduct16BitUnsignedAccelerated, integerDotProduct16BitSignedAccelerated, integerDotProduct16BitMixedSignednessAccelerated, integerDotProduct32BitUnsignedAccelerated, integerDotProduct32BitSignedAccelerated, integerDotProduct32BitMixedSignednessAccelerated, integerDotProduct64BitUnsignedAccelerated, integerDotProduct64BitSignedAccelerated, integerDotProduct64BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating8BitUnsignedAccelerated, integerDotProductAccumulatingSaturating8BitSignedAccelerated, integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated, integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated, integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated, integerDotProductAccumulatingSaturating16BitUnsignedAccelerated, integerDotProductAccumulatingSaturating16BitSignedAccelerated, integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating32BitUnsignedAccelerated, integerDotProductAccumulatingSaturating32BitSignedAccelerated, integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating64BitUnsignedAccelerated, integerDotProductAccumulatingSaturating64BitSignedAccelerated, integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceShaderIntegerDotProductProperties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( integerDotProduct8BitUnsignedAccelerated == rhs.integerDotProduct8BitUnsignedAccelerated )
|
|
&& ( integerDotProduct8BitSignedAccelerated == rhs.integerDotProduct8BitSignedAccelerated )
|
|
&& ( integerDotProduct8BitMixedSignednessAccelerated == rhs.integerDotProduct8BitMixedSignednessAccelerated )
|
|
&& ( integerDotProduct4x8BitPackedUnsignedAccelerated == rhs.integerDotProduct4x8BitPackedUnsignedAccelerated )
|
|
&& ( integerDotProduct4x8BitPackedSignedAccelerated == rhs.integerDotProduct4x8BitPackedSignedAccelerated )
|
|
&& ( integerDotProduct4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProduct4x8BitPackedMixedSignednessAccelerated )
|
|
&& ( integerDotProduct16BitUnsignedAccelerated == rhs.integerDotProduct16BitUnsignedAccelerated )
|
|
&& ( integerDotProduct16BitSignedAccelerated == rhs.integerDotProduct16BitSignedAccelerated )
|
|
&& ( integerDotProduct16BitMixedSignednessAccelerated == rhs.integerDotProduct16BitMixedSignednessAccelerated )
|
|
&& ( integerDotProduct32BitUnsignedAccelerated == rhs.integerDotProduct32BitUnsignedAccelerated )
|
|
&& ( integerDotProduct32BitSignedAccelerated == rhs.integerDotProduct32BitSignedAccelerated )
|
|
&& ( integerDotProduct32BitMixedSignednessAccelerated == rhs.integerDotProduct32BitMixedSignednessAccelerated )
|
|
&& ( integerDotProduct64BitUnsignedAccelerated == rhs.integerDotProduct64BitUnsignedAccelerated )
|
|
&& ( integerDotProduct64BitSignedAccelerated == rhs.integerDotProduct64BitSignedAccelerated )
|
|
&& ( integerDotProduct64BitMixedSignednessAccelerated == rhs.integerDotProduct64BitMixedSignednessAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating8BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitSignedAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated == rhs.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating16BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitSignedAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating32BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitSignedAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating64BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitSignedAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerDotProductProperties;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties ) == sizeof( VkPhysicalDeviceShaderIntegerDotProductProperties ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties>::value, "PhysicalDeviceShaderIntegerDotProductProperties is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderIntegerDotProductProperties>
|
|
{
|
|
using Type = PhysicalDeviceShaderIntegerDotProductProperties;
|
|
};
|
|
using PhysicalDeviceShaderIntegerDotProductPropertiesKHR = PhysicalDeviceShaderIntegerDotProductProperties;
|
|
|
|
struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL
|
|
{
|
|
using NativeType = VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: shaderIntegerFunctions2( shaderIntegerFunctions2_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( *reinterpret_cast<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setShaderIntegerFunctions2( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderIntegerFunctions2 = shaderIntegerFunctions2_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, shaderIntegerFunctions2 );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( shaderIntegerFunctions2 == rhs.shaderIntegerFunctions2 );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2 = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) == sizeof( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>::value, "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>
|
|
{
|
|
using Type = PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
|
|
};
|
|
|
|
struct PhysicalDeviceShaderSMBuiltinsFeaturesNV
|
|
{
|
|
using NativeType = VkPhysicalDeviceShaderSMBuiltinsFeaturesNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: shaderSMBuiltins( shaderSMBuiltins_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceShaderSMBuiltinsFeaturesNV( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceShaderSMBuiltinsFeaturesNV( *reinterpret_cast<PhysicalDeviceShaderSMBuiltinsFeaturesNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsFeaturesNV & setShaderSMBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderSMBuiltins = shaderSMBuiltins_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsFeaturesNV*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, shaderSMBuiltins );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( shaderSMBuiltins == rhs.shaderSMBuiltins );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV>::value, "PhysicalDeviceShaderSMBuiltinsFeaturesNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV>
|
|
{
|
|
using Type = PhysicalDeviceShaderSMBuiltinsFeaturesNV;
|
|
};
|
|
|
|
  struct PhysicalDeviceShaderSMBuiltinsPropertiesNV
  {
    using NativeType = VkPhysicalDeviceShaderSMBuiltinsPropertiesNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV(uint32_t shaderSMCount_ = {}, uint32_t shaderWarpsPerSM_ = {}) VULKAN_HPP_NOEXCEPT
    : shaderSMCount( shaderSMCount_ ), shaderWarpsPerSM( shaderWarpsPerSM_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderSMBuiltinsPropertiesNV( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderSMBuiltinsPropertiesNV( *reinterpret_cast<PhysicalDeviceShaderSMBuiltinsPropertiesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV const *>( &rhs );
      return *this;
    }

    explicit operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsPropertiesNV*>( this );
    }

    explicit operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsPropertiesNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderSMCount, shaderWarpsPerSM );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( shaderSMCount == rhs.shaderSMCount )
          && ( shaderWarpsPerSM == rhs.shaderWarpsPerSM );
#endif
    }

    bool operator!=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV;
    void * pNext = {};
    uint32_t shaderSMCount = {};
    uint32_t shaderWarpsPerSM = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV>::value, "PhysicalDeviceShaderSMBuiltinsPropertiesNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV>
  {
    using Type = PhysicalDeviceShaderSMBuiltinsPropertiesNV;
  };

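  // Editorial note, not generated from the registry: a minimal usage sketch for the properties
  // struct above, assuming the enhanced-mode vulkan.hpp wrappers and an existing
  // VULKAN_HPP_NAMESPACE::PhysicalDevice handle named physicalDevice (a hypothetical variable).
  // Properties structs in this file are filled in by chaining them behind PhysicalDeviceProperties2:
  //
  //   auto chain   = physicalDevice.getProperties2<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2,
  //                                                VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV>();
  //   uint32_t smCount = chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV>().shaderSMCount;
  //   uint32_t warps   = chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV>().shaderWarpsPerSM;
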
  struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures
  {
    using NativeType = VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}) VULKAN_HPP_NOEXCEPT
    : shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderSubgroupExtendedTypesFeatures( *reinterpret_cast<PhysicalDeviceShaderSubgroupExtendedTypesFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShaderSubgroupExtendedTypesFeatures & operator=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderSubgroupExtendedTypesFeatures & operator=( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures*>( this );
    }

    explicit operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderSubgroupExtendedTypes );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes );
#endif
    }

    bool operator!=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) == sizeof( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures>::value, "PhysicalDeviceShaderSubgroupExtendedTypesFeatures is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures>
  {
    using Type = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
  };
  using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;

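  // Editorial note, not generated from the registry: a sketch of how feature structs like the one
  // above are typically used, assuming the enhanced-mode vulkan.hpp wrappers and a hypothetical
  // VULKAN_HPP_NAMESPACE::PhysicalDevice named physicalDevice. Support is queried via a
  // PhysicalDeviceFeatures2 chain, and the same chain can then be passed as DeviceCreateInfo::pNext
  // to enable the feature:
  //
  //   auto features = physicalDevice.getFeatures2<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2,
  //                                               VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures>();
  //   bool supported = features.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures>().shaderSubgroupExtendedTypes;
  //   // if supported, chain features.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>() into DeviceCreateInfo::pNext
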
  struct PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR
  {
    using NativeType = VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ = {}) VULKAN_HPP_NOEXCEPT
    : shaderSubgroupUniformControlFlow( shaderSubgroupUniformControlFlow_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( *reinterpret_cast<PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & operator=( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & operator=( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & setShaderSubgroupUniformControlFlow( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSubgroupUniformControlFlow = shaderSubgroupUniformControlFlow_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR*>( this );
    }

    explicit operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderSubgroupUniformControlFlow );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( shaderSubgroupUniformControlFlow == rhs.shaderSubgroupUniformControlFlow );
#endif
    }

    bool operator!=( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR>::value, "PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR>
  {
    using Type = PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
  };

  struct PhysicalDeviceShaderTerminateInvocationFeatures
  {
    using NativeType = VkPhysicalDeviceShaderTerminateInvocationFeatures;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {}) VULKAN_HPP_NOEXCEPT
    : shaderTerminateInvocation( shaderTerminateInvocation_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeatures( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderTerminateInvocationFeatures( VkPhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderTerminateInvocationFeatures( *reinterpret_cast<PhysicalDeviceShaderTerminateInvocationFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShaderTerminateInvocationFeatures & operator=( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderTerminateInvocationFeatures & operator=( VkPhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeatures & setShaderTerminateInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderTerminateInvocation = shaderTerminateInvocation_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceShaderTerminateInvocationFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderTerminateInvocationFeatures*>( this );
    }

    explicit operator VkPhysicalDeviceShaderTerminateInvocationFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderTerminateInvocationFeatures*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderTerminateInvocation );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceShaderTerminateInvocationFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( shaderTerminateInvocation == rhs.shaderTerminateInvocation );
#endif
    }

    bool operator!=( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures ) == sizeof( VkPhysicalDeviceShaderTerminateInvocationFeatures ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures>::value, "PhysicalDeviceShaderTerminateInvocationFeatures is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures>
  {
    using Type = PhysicalDeviceShaderTerminateInvocationFeatures;
  };
  using PhysicalDeviceShaderTerminateInvocationFeaturesKHR = PhysicalDeviceShaderTerminateInvocationFeatures;

  struct PhysicalDeviceShadingRateImageFeaturesNV
  {
    using NativeType = VkPhysicalDeviceShadingRateImageFeaturesNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = {}) VULKAN_HPP_NOEXCEPT
    : shadingRateImage( shadingRateImage_ ), shadingRateCoarseSampleOrder( shadingRateCoarseSampleOrder_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShadingRateImageFeaturesNV( *reinterpret_cast<PhysicalDeviceShadingRateImageFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShadingRateImageFeaturesNV & operator=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShadingRateImageFeaturesNV & operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateImage( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ ) VULKAN_HPP_NOEXCEPT
    {
      shadingRateImage = shadingRateImage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateCoarseSampleOrder( VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ ) VULKAN_HPP_NOEXCEPT
    {
      shadingRateCoarseSampleOrder = shadingRateCoarseSampleOrder_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceShadingRateImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShadingRateImageFeaturesNV*>( this );
    }

    explicit operator VkPhysicalDeviceShadingRateImageFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShadingRateImageFeaturesNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shadingRateImage, shadingRateCoarseSampleOrder );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceShadingRateImageFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( shadingRateImage == rhs.shadingRateImage )
          && ( shadingRateCoarseSampleOrder == rhs.shadingRateCoarseSampleOrder );
#endif
    }

    bool operator!=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage = {};
    VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV ) == sizeof( VkPhysicalDeviceShadingRateImageFeaturesNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV>::value, "PhysicalDeviceShadingRateImageFeaturesNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShadingRateImageFeaturesNV>
  {
    using Type = PhysicalDeviceShadingRateImageFeaturesNV;
  };

  struct PhysicalDeviceShadingRateImagePropertiesNV
  {
    using NativeType = VkPhysicalDeviceShadingRateImagePropertiesNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV(VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize_ = {}, uint32_t shadingRatePaletteSize_ = {}, uint32_t shadingRateMaxCoarseSamples_ = {}) VULKAN_HPP_NOEXCEPT
    : shadingRateTexelSize( shadingRateTexelSize_ ), shadingRatePaletteSize( shadingRatePaletteSize_ ), shadingRateMaxCoarseSamples( shadingRateMaxCoarseSamples_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShadingRateImagePropertiesNV( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShadingRateImagePropertiesNV( *reinterpret_cast<PhysicalDeviceShadingRateImagePropertiesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShadingRateImagePropertiesNV & operator=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShadingRateImagePropertiesNV & operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV const *>( &rhs );
      return *this;
    }

    explicit operator VkPhysicalDeviceShadingRateImagePropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShadingRateImagePropertiesNV*>( this );
    }

    explicit operator VkPhysicalDeviceShadingRateImagePropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShadingRateImagePropertiesNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, uint32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shadingRateTexelSize, shadingRatePaletteSize, shadingRateMaxCoarseSamples );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceShadingRateImagePropertiesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( shadingRateTexelSize == rhs.shadingRateTexelSize )
          && ( shadingRatePaletteSize == rhs.shadingRatePaletteSize )
          && ( shadingRateMaxCoarseSamples == rhs.shadingRateMaxCoarseSamples );
#endif
    }

    bool operator!=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize = {};
    uint32_t shadingRatePaletteSize = {};
    uint32_t shadingRateMaxCoarseSamples = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV ) == sizeof( VkPhysicalDeviceShadingRateImagePropertiesNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV>::value, "PhysicalDeviceShadingRateImagePropertiesNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShadingRateImagePropertiesNV>
  {
    using Type = PhysicalDeviceShadingRateImagePropertiesNV;
  };

struct PhysicalDeviceSparseImageFormatInfo2
|
|
{
|
|
using NativeType = VkPhysicalDeviceSparseImageFormatInfo2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSparseImageFormatInfo2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal) VULKAN_HPP_NOEXCEPT
|
|
: format( format_ ), type( type_ ), samples( samples_ ), usage( usage_ ), tiling( tiling_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceSparseImageFormatInfo2( *reinterpret_cast<PhysicalDeviceSparseImageFormatInfo2 const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceSparseImageFormatInfo2 & operator=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceSparseImageFormatInfo2 & operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
format = format_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
type = type_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
samples = samples_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
usage = usage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
tiling = tiling_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceSparseImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceSparseImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceSparseImageFormatInfo2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::ImageType const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &, VULKAN_HPP_NAMESPACE::ImageTiling const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, format, type, samples, usage, tiling );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceSparseImageFormatInfo2 const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( format == rhs.format )
|
|
&& ( type == rhs.type )
|
|
&& ( samples == rhs.samples )
|
|
&& ( usage == rhs.usage )
|
|
&& ( tiling == rhs.tiling );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D;
|
|
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
|
|
VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2>::value, "PhysicalDeviceSparseImageFormatInfo2 is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceSparseImageFormatInfo2>
|
|
{
|
|
using Type = PhysicalDeviceSparseImageFormatInfo2;
|
|
};
|
|
using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2;
|
|
|
|
struct PhysicalDeviceSubgroupProperties
|
|
{
|
|
using NativeType = VkPhysicalDeviceSubgroupProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties(uint32_t subgroupSize_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages_ = {}, VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_ = {}, VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: subgroupSize( subgroupSize_ ), supportedStages( supportedStages_ ), supportedOperations( supportedOperations_ ), quadOperationsInAllStages( quadOperationsInAllStages_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceSubgroupProperties( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceSubgroupProperties( *reinterpret_cast<PhysicalDeviceSubgroupProperties const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceSubgroupProperties & operator=( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceSubgroupProperties & operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceSubgroupProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceSubgroupProperties*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceSubgroupProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceSubgroupProperties*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, subgroupSize, supportedStages, supportedOperations, quadOperationsInAllStages );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceSubgroupProperties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( subgroupSize == rhs.subgroupSize )
|
|
&& ( supportedStages == rhs.supportedStages )
|
|
&& ( supportedOperations == rhs.supportedOperations )
|
|
&& ( quadOperationsInAllStages == rhs.quadOperationsInAllStages );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties;
|
|
void * pNext = {};
|
|
uint32_t subgroupSize = {};
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages = {};
|
|
VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties ) == sizeof( VkPhysicalDeviceSubgroupProperties ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties>::value, "PhysicalDeviceSubgroupProperties is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupProperties>
|
|
{
|
|
using Type = PhysicalDeviceSubgroupProperties;
|
|
};
|
|
|
|
struct PhysicalDeviceSubgroupSizeControlFeatures
|
|
{
|
|
using NativeType = VkPhysicalDeviceSubgroupSizeControlFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeatures(VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {}, VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: subgroupSizeControl( subgroupSizeControl_ ), computeFullSubgroups( computeFullSubgroups_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeatures( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceSubgroupSizeControlFeatures( VkPhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceSubgroupSizeControlFeatures( *reinterpret_cast<PhysicalDeviceSubgroupSizeControlFeatures const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceSubgroupSizeControlFeatures & operator=( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceSubgroupSizeControlFeatures & operator=( VkPhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subgroupSizeControl = subgroupSizeControl_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
computeFullSubgroups = computeFullSubgroups_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceSubgroupSizeControlFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlFeatures*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceSubgroupSizeControlFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlFeatures*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, subgroupSizeControl, computeFullSubgroups );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceSubgroupSizeControlFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( subgroupSizeControl == rhs.subgroupSizeControl )
|
|
&& ( computeFullSubgroups == rhs.computeFullSubgroups );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures ) == sizeof( VkPhysicalDeviceSubgroupSizeControlFeatures ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures>::value, "PhysicalDeviceSubgroupSizeControlFeatures is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupSizeControlFeatures>
|
|
{
|
|
using Type = PhysicalDeviceSubgroupSizeControlFeatures;
|
|
};
|
|
using PhysicalDeviceSubgroupSizeControlFeaturesEXT = PhysicalDeviceSubgroupSizeControlFeatures;
|
|
|
|
struct PhysicalDeviceSubgroupSizeControlProperties
|
|
{
|
|
using NativeType = VkPhysicalDeviceSubgroupSizeControlProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlProperties(uint32_t minSubgroupSize_ = {}, uint32_t maxSubgroupSize_ = {}, uint32_t maxComputeWorkgroupSubgroups_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: minSubgroupSize( minSubgroupSize_ ), maxSubgroupSize( maxSubgroupSize_ ), maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ ), requiredSubgroupSizeStages( requiredSubgroupSizeStages_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlProperties( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceSubgroupSizeControlProperties( VkPhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceSubgroupSizeControlProperties( *reinterpret_cast<PhysicalDeviceSubgroupSizeControlProperties const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceSubgroupSizeControlProperties & operator=( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceSubgroupSizeControlProperties & operator=( VkPhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceSubgroupSizeControlProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlProperties*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceSubgroupSizeControlProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlProperties*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, minSubgroupSize, maxSubgroupSize, maxComputeWorkgroupSubgroups, requiredSubgroupSizeStages );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceSubgroupSizeControlProperties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( minSubgroupSize == rhs.minSubgroupSize )
|
|
&& ( maxSubgroupSize == rhs.maxSubgroupSize )
|
|
&& ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups )
|
|
&& ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlProperties;
|
|
void * pNext = {};
|
|
uint32_t minSubgroupSize = {};
|
|
uint32_t maxSubgroupSize = {};
|
|
uint32_t maxComputeWorkgroupSubgroups = {};
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties ) == sizeof( VkPhysicalDeviceSubgroupSizeControlProperties ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties>::value, "PhysicalDeviceSubgroupSizeControlProperties is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupSizeControlProperties>
|
|
{
|
|
using Type = PhysicalDeviceSubgroupSizeControlProperties;
|
|
};
|
|
using PhysicalDeviceSubgroupSizeControlPropertiesEXT = PhysicalDeviceSubgroupSizeControlProperties;
|
|
|
|
struct PhysicalDeviceSubpassShadingFeaturesHUAWEI
|
|
{
|
|
using NativeType = VkPhysicalDeviceSubpassShadingFeaturesHUAWEI;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingFeaturesHUAWEI(VULKAN_HPP_NAMESPACE::Bool32 subpassShading_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: subpassShading( subpassShading_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingFeaturesHUAWEI( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceSubpassShadingFeaturesHUAWEI( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceSubpassShadingFeaturesHUAWEI( *reinterpret_cast<PhysicalDeviceSubpassShadingFeaturesHUAWEI const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceSubpassShadingFeaturesHUAWEI & operator=( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceSubpassShadingFeaturesHUAWEI & operator=( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassShadingFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassShadingFeaturesHUAWEI & setSubpassShading( VULKAN_HPP_NAMESPACE::Bool32 subpassShading_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subpassShading = subpassShading_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceSubpassShadingFeaturesHUAWEI*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceSubpassShadingFeaturesHUAWEI*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, subpassShading );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( subpassShading == rhs.subpassShading );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 subpassShading = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI ) == sizeof( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI>::value, "PhysicalDeviceSubpassShadingFeaturesHUAWEI is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI>
|
|
{
|
|
using Type = PhysicalDeviceSubpassShadingFeaturesHUAWEI;
|
|
};
|
|
|
|
struct PhysicalDeviceSubpassShadingPropertiesHUAWEI
|
|
{
|
|
using NativeType = VkPhysicalDeviceSubpassShadingPropertiesHUAWEI;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingPropertiesHUAWEI(uint32_t maxSubpassShadingWorkgroupSizeAspectRatio_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: maxSubpassShadingWorkgroupSizeAspectRatio( maxSubpassShadingWorkgroupSizeAspectRatio_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingPropertiesHUAWEI( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceSubpassShadingPropertiesHUAWEI( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceSubpassShadingPropertiesHUAWEI( *reinterpret_cast<PhysicalDeviceSubpassShadingPropertiesHUAWEI const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceSubpassShadingPropertiesHUAWEI & operator=( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceSubpassShadingPropertiesHUAWEI & operator=( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceSubpassShadingPropertiesHUAWEI*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceSubpassShadingPropertiesHUAWEI*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maxSubpassShadingWorkgroupSizeAspectRatio );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( maxSubpassShadingWorkgroupSizeAspectRatio == rhs.maxSubpassShadingWorkgroupSizeAspectRatio );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI;
|
|
void * pNext = {};
|
|
uint32_t maxSubpassShadingWorkgroupSizeAspectRatio = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI ) == sizeof( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI>::value, "PhysicalDeviceSubpassShadingPropertiesHUAWEI is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI>
|
|
{
|
|
using Type = PhysicalDeviceSubpassShadingPropertiesHUAWEI;
|
|
};
|
|
|
|
struct PhysicalDeviceSurfaceInfo2KHR
|
|
{
|
|
using NativeType = VkPhysicalDeviceSurfaceInfo2KHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSurfaceInfo2KHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: surface( surface_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceSurfaceInfo2KHR( *reinterpret_cast<PhysicalDeviceSurfaceInfo2KHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceSurfaceInfo2KHR & operator=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceSurfaceInfo2KHR & operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
surface = surface_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceSurfaceInfo2KHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceSurfaceInfo2KHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceSurfaceInfo2KHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SurfaceKHR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, surface );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceSurfaceInfo2KHR const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( surface == rhs.surface );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {};
|
|
|
|
};
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR>::value, "PhysicalDeviceSurfaceInfo2KHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSurfaceInfo2KHR>
  {
    using Type = PhysicalDeviceSurfaceInfo2KHR;
  };
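  // Usage sketch (illustrative comment, not part of the generated registry output): this struct
  // wraps the surface handle handed to the VK_KHR_get_surface_capabilities2 queries.
  // `physicalDevice` and `surface` are placeholders for handles created elsewhere.
  //
  //   vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo{};
  //   surfaceInfo.setSurface( surface );
  //   vk::SurfaceCapabilities2KHR capabilities = physicalDevice.getSurfaceCapabilities2KHR( surfaceInfo );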
struct PhysicalDeviceSynchronization2Features
|
|
{
|
|
using NativeType = VkPhysicalDeviceSynchronization2Features;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSynchronization2Features;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2Features(VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: synchronization2( synchronization2_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2Features( PhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceSynchronization2Features( VkPhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceSynchronization2Features( *reinterpret_cast<PhysicalDeviceSynchronization2Features const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceSynchronization2Features & operator=( PhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceSynchronization2Features & operator=( VkPhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2Features & setSynchronization2( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
synchronization2 = synchronization2_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceSynchronization2Features const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceSynchronization2Features*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceSynchronization2Features &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceSynchronization2Features*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, synchronization2 );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceSynchronization2Features const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceSynchronization2Features const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( synchronization2 == rhs.synchronization2 );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceSynchronization2Features const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSynchronization2Features;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 synchronization2 = {};
|
|
|
|
};
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features ) == sizeof( VkPhysicalDeviceSynchronization2Features ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features>::value, "PhysicalDeviceSynchronization2Features is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSynchronization2Features>
  {
    using Type = PhysicalDeviceSynchronization2Features;
  };

  using PhysicalDeviceSynchronization2FeaturesKHR = PhysicalDeviceSynchronization2Features;
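  // Usage sketch (illustrative comment, not part of the generated registry output): feature
  // structs such as this one are chained into vk::DeviceCreateInfo::pNext to request the feature
  // at device creation; `deviceCreateInfo` is a placeholder for an otherwise populated create-info.
  //
  //   vk::PhysicalDeviceSynchronization2Features sync2Features{};
  //   sync2Features.setSynchronization2( VK_TRUE );
  //   deviceCreateInfo.setPNext( &sync2Features );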
struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: texelBufferAlignment( texelBufferAlignment_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceTexelBufferAlignmentFeaturesEXT( *reinterpret_cast<PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setTexelBufferAlignment( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
texelBufferAlignment = texelBufferAlignment_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, texelBufferAlignment );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( texelBufferAlignment == rhs.texelBufferAlignment );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment = {};
|
|
|
|
};
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT>::value, "PhysicalDeviceTexelBufferAlignmentFeaturesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT>
  {
    using Type = PhysicalDeviceTexelBufferAlignmentFeaturesEXT;
  };
struct PhysicalDeviceTexelBufferAlignmentProperties
|
|
{
|
|
using NativeType = VkPhysicalDeviceTexelBufferAlignmentProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentProperties(VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ ), storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ ), uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ ), uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentProperties( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceTexelBufferAlignmentProperties( VkPhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceTexelBufferAlignmentProperties( *reinterpret_cast<PhysicalDeviceTexelBufferAlignmentProperties const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceTexelBufferAlignmentProperties & operator=( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceTexelBufferAlignmentProperties & operator=( VkPhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceTexelBufferAlignmentProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentProperties*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceTexelBufferAlignmentProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentProperties*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, storageTexelBufferOffsetAlignmentBytes, storageTexelBufferOffsetSingleTexelAlignment, uniformTexelBufferOffsetAlignmentBytes, uniformTexelBufferOffsetSingleTexelAlignment );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceTexelBufferAlignmentProperties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes )
|
|
&& ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment )
|
|
&& ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes )
|
|
&& ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentProperties;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {};
|
|
|
|
};
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentProperties ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties>::value, "PhysicalDeviceTexelBufferAlignmentProperties is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTexelBufferAlignmentProperties>
  {
    using Type = PhysicalDeviceTexelBufferAlignmentProperties;
  };

  using PhysicalDeviceTexelBufferAlignmentPropertiesEXT = PhysicalDeviceTexelBufferAlignmentProperties;
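  // Usage sketch (illustrative comment, not part of the generated registry output): the alignment
  // limits reported here are typically used to round a texel-buffer view offset up to a legal
  // value; `physicalDevice` and `rawOffset` are placeholders.
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceTexelBufferAlignmentProperties>();
  //   vk::DeviceSize align  = chain.get<vk::PhysicalDeviceTexelBufferAlignmentProperties>().storageTexelBufferOffsetAlignmentBytes;
  //   vk::DeviceSize offset = ( ( rawOffset + align - 1 ) / align ) * align;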
struct PhysicalDeviceTextureCompressionASTCHDRFeatures
|
|
{
|
|
using NativeType = VkPhysicalDeviceTextureCompressionASTCHDRFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeatures(VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: textureCompressionASTC_HDR( textureCompressionASTC_HDR_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeatures( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceTextureCompressionASTCHDRFeatures( VkPhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceTextureCompressionASTCHDRFeatures( *reinterpret_cast<PhysicalDeviceTextureCompressionASTCHDRFeatures const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceTextureCompressionASTCHDRFeatures & operator=( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceTextureCompressionASTCHDRFeatures & operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeatures & setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
textureCompressionASTC_HDR = textureCompressionASTC_HDR_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceTextureCompressionASTCHDRFeatures*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceTextureCompressionASTCHDRFeatures*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, textureCompressionASTC_HDR );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceTextureCompressionASTCHDRFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {};
|
|
|
|
};
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures ) == sizeof( VkPhysicalDeviceTextureCompressionASTCHDRFeatures ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures>::value, "PhysicalDeviceTextureCompressionASTCHDRFeatures is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures>
  {
    using Type = PhysicalDeviceTextureCompressionASTCHDRFeatures;
  };

  using PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT = PhysicalDeviceTextureCompressionASTCHDRFeatures;
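  // Usage sketch (illustrative comment, not part of the generated registry output): support is
  // usually checked with a chained features query before the feature is requested at device
  // creation; `physicalDevice` is a placeholder.
  //
  //   auto features = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                               vk::PhysicalDeviceTextureCompressionASTCHDRFeatures>();
  //   bool astcHdr = features.get<vk::PhysicalDeviceTextureCompressionASTCHDRFeatures>().textureCompressionASTC_HDR;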
struct PhysicalDeviceTimelineSemaphoreFeatures
|
|
{
|
|
using NativeType = VkPhysicalDeviceTimelineSemaphoreFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures(VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: timelineSemaphore( timelineSemaphore_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceTimelineSemaphoreFeatures( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceTimelineSemaphoreFeatures( *reinterpret_cast<PhysicalDeviceTimelineSemaphoreFeatures const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceTimelineSemaphoreFeatures & operator=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceTimelineSemaphoreFeatures & operator=( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreFeatures & setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
timelineSemaphore = timelineSemaphore_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceTimelineSemaphoreFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreFeatures*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceTimelineSemaphoreFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreFeatures*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, timelineSemaphore );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceTimelineSemaphoreFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( timelineSemaphore == rhs.timelineSemaphore );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {};
|
|
|
|
};
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures ) == sizeof( VkPhysicalDeviceTimelineSemaphoreFeatures ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures>::value, "PhysicalDeviceTimelineSemaphoreFeatures is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTimelineSemaphoreFeatures>
  {
    using Type = PhysicalDeviceTimelineSemaphoreFeatures;
  };

  using PhysicalDeviceTimelineSemaphoreFeaturesKHR = PhysicalDeviceTimelineSemaphoreFeatures;
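  // Usage sketch (illustrative comment, not part of the generated registry output): once
  // `timelineSemaphore` has been enabled, a timeline semaphore is created by chaining
  // vk::SemaphoreTypeCreateInfo into vk::SemaphoreCreateInfo; `device` is a placeholder.
  //
  //   vk::SemaphoreTypeCreateInfo typeInfo( vk::SemaphoreType::eTimeline, /*initialValue=*/0 );
  //   vk::SemaphoreCreateInfo     createInfo{};
  //   createInfo.setPNext( &typeInfo );
  //   vk::Semaphore timeline = device.createSemaphore( createInfo );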
struct PhysicalDeviceTimelineSemaphoreProperties
|
|
{
|
|
using NativeType = VkPhysicalDeviceTimelineSemaphoreProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties(uint64_t maxTimelineSemaphoreValueDifference_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceTimelineSemaphoreProperties( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceTimelineSemaphoreProperties( *reinterpret_cast<PhysicalDeviceTimelineSemaphoreProperties const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceTimelineSemaphoreProperties & operator=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceTimelineSemaphoreProperties & operator=( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceTimelineSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreProperties*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceTimelineSemaphoreProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreProperties*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint64_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maxTimelineSemaphoreValueDifference );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceTimelineSemaphoreProperties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties;
|
|
void * pNext = {};
|
|
uint64_t maxTimelineSemaphoreValueDifference = {};
|
|
|
|
};
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties ) == sizeof( VkPhysicalDeviceTimelineSemaphoreProperties ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties>::value, "PhysicalDeviceTimelineSemaphoreProperties is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTimelineSemaphoreProperties>
  {
    using Type = PhysicalDeviceTimelineSemaphoreProperties;
  };

  using PhysicalDeviceTimelineSemaphorePropertiesKHR = PhysicalDeviceTimelineSemaphoreProperties;
struct PhysicalDeviceToolProperties
|
|
{
|
|
using NativeType = VkPhysicalDeviceToolProperties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceToolProperties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolProperties(std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const & name_ = {}, std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const & version_ = {}, VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & description_ = {}, std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const & layer_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: name( name_ ), version( version_ ), purposes( purposes_ ), description( description_ ), layer( layer_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolProperties( PhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceToolProperties( VkPhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceToolProperties( *reinterpret_cast<PhysicalDeviceToolProperties const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceToolProperties & operator=( PhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceToolProperties & operator=( VkPhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceToolProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceToolProperties*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceToolProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceToolProperties*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &, VULKAN_HPP_NAMESPACE::ToolPurposeFlags const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, name, version, purposes, description, layer );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceToolProperties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( name == rhs.name )
|
|
&& ( version == rhs.version )
|
|
&& ( purposes == rhs.purposes )
|
|
&& ( description == rhs.description )
|
|
&& ( layer == rhs.layer );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceToolProperties;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> name = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> version = {};
|
|
VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> layer = {};
|
|
|
|
};
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties ) == sizeof( VkPhysicalDeviceToolProperties ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>::value, "PhysicalDeviceToolProperties is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceToolProperties>
  {
    using Type = PhysicalDeviceToolProperties;
  };

  using PhysicalDeviceToolPropertiesEXT = PhysicalDeviceToolProperties;
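  // Usage sketch (illustrative comment, not part of the generated registry output): active tooling
  // (validation layers, profilers, ...) can be enumerated per physical device; each element of the
  // returned vector is one PhysicalDeviceToolProperties. `physicalDevice` is a placeholder.
  //
  //   std::vector<vk::PhysicalDeviceToolProperties> tools = physicalDevice.getToolProperties();
  //   for ( auto const & tool : tools )
  //   {
  //     std::string toolName( tool.name.data() );  // name/version/description are fixed-size char arrays
  //   }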
struct PhysicalDeviceTransformFeedbackFeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceTransformFeedbackFeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = {}, VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: transformFeedback( transformFeedback_ ), geometryStreams( geometryStreams_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceTransformFeedbackFeaturesEXT( *reinterpret_cast<PhysicalDeviceTransformFeedbackFeaturesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & setTransformFeedback( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
transformFeedback = transformFeedback_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & setGeometryStreams( VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
geometryStreams = geometryStreams_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackFeaturesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceTransformFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackFeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, transformFeedback, geometryStreams );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceTransformFeedbackFeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( transformFeedback == rhs.transformFeedback )
|
|
&& ( geometryStreams == rhs.geometryStreams );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 transformFeedback = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 geometryStreams = {};
|
|
|
|
};
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackFeaturesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT>::value, "PhysicalDeviceTransformFeedbackFeaturesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT>
  {
    using Type = PhysicalDeviceTransformFeedbackFeaturesEXT;
  };
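  // Usage sketch (illustrative comment, not part of the generated registry output): transform
  // feedback is provided by VK_EXT_transform_feedback, so the extension must be enabled in
  // addition to chaining this struct into vk::DeviceCreateInfo; `deviceCreateInfo` is a placeholder.
  //
  //   vk::PhysicalDeviceTransformFeedbackFeaturesEXT xfbFeatures{};
  //   xfbFeatures.setTransformFeedback( VK_TRUE ).setGeometryStreams( VK_TRUE );
  //   deviceCreateInfo.setPNext( &xfbFeatures );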
struct PhysicalDeviceTransformFeedbackPropertiesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceTransformFeedbackPropertiesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT(uint32_t maxTransformFeedbackStreams_ = {}, uint32_t maxTransformFeedbackBuffers_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize_ = {}, uint32_t maxTransformFeedbackStreamDataSize_ = {}, uint32_t maxTransformFeedbackBufferDataSize_ = {}, uint32_t maxTransformFeedbackBufferDataStride_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: maxTransformFeedbackStreams( maxTransformFeedbackStreams_ ), maxTransformFeedbackBuffers( maxTransformFeedbackBuffers_ ), maxTransformFeedbackBufferSize( maxTransformFeedbackBufferSize_ ), maxTransformFeedbackStreamDataSize( maxTransformFeedbackStreamDataSize_ ), maxTransformFeedbackBufferDataSize( maxTransformFeedbackBufferDataSize_ ), maxTransformFeedbackBufferDataStride( maxTransformFeedbackBufferDataStride_ ), transformFeedbackQueries( transformFeedbackQueries_ ), transformFeedbackStreamsLinesTriangles( transformFeedbackStreamsLinesTriangles_ ), transformFeedbackRasterizationStreamSelect( transformFeedbackRasterizationStreamSelect_ ), transformFeedbackDraw( transformFeedbackDraw_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceTransformFeedbackPropertiesEXT( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceTransformFeedbackPropertiesEXT( *reinterpret_cast<PhysicalDeviceTransformFeedbackPropertiesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackPropertiesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceTransformFeedbackPropertiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackPropertiesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maxTransformFeedbackStreams, maxTransformFeedbackBuffers, maxTransformFeedbackBufferSize, maxTransformFeedbackStreamDataSize, maxTransformFeedbackBufferDataSize, maxTransformFeedbackBufferDataStride, transformFeedbackQueries, transformFeedbackStreamsLinesTriangles, transformFeedbackRasterizationStreamSelect, transformFeedbackDraw );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceTransformFeedbackPropertiesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( maxTransformFeedbackStreams == rhs.maxTransformFeedbackStreams )
|
|
&& ( maxTransformFeedbackBuffers == rhs.maxTransformFeedbackBuffers )
|
|
&& ( maxTransformFeedbackBufferSize == rhs.maxTransformFeedbackBufferSize )
|
|
&& ( maxTransformFeedbackStreamDataSize == rhs.maxTransformFeedbackStreamDataSize )
|
|
&& ( maxTransformFeedbackBufferDataSize == rhs.maxTransformFeedbackBufferDataSize )
|
|
&& ( maxTransformFeedbackBufferDataStride == rhs.maxTransformFeedbackBufferDataStride )
|
|
&& ( transformFeedbackQueries == rhs.transformFeedbackQueries )
|
|
&& ( transformFeedbackStreamsLinesTriangles == rhs.transformFeedbackStreamsLinesTriangles )
|
|
&& ( transformFeedbackRasterizationStreamSelect == rhs.transformFeedbackRasterizationStreamSelect )
|
|
&& ( transformFeedbackDraw == rhs.transformFeedbackDraw );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT;
|
|
void * pNext = {};
|
|
uint32_t maxTransformFeedbackStreams = {};
|
|
uint32_t maxTransformFeedbackBuffers = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize = {};
|
|
uint32_t maxTransformFeedbackStreamDataSize = {};
|
|
uint32_t maxTransformFeedbackBufferDataSize = {};
|
|
uint32_t maxTransformFeedbackBufferDataStride = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw = {};
|
|
|
|
};
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackPropertiesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT>::value, "PhysicalDeviceTransformFeedbackPropertiesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT>
  {
    using Type = PhysicalDeviceTransformFeedbackPropertiesEXT;
  };
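  // Usage sketch (illustrative comment, not part of the generated registry output): the limits
  // above bound, for example, how many capture buffers may be bound at once:
  //
  //   auto props = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceTransformFeedbackPropertiesEXT>();
  //   uint32_t maxXfbBuffers = props.get<vk::PhysicalDeviceTransformFeedbackPropertiesEXT>().maxTransformFeedbackBuffers;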
struct PhysicalDeviceUniformBufferStandardLayoutFeatures
|
|
{
|
|
using NativeType = VkPhysicalDeviceUniformBufferStandardLayoutFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures(VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: uniformBufferStandardLayout( uniformBufferStandardLayout_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceUniformBufferStandardLayoutFeatures( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceUniformBufferStandardLayoutFeatures( *reinterpret_cast<PhysicalDeviceUniformBufferStandardLayoutFeatures const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceUniformBufferStandardLayoutFeatures & operator=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceUniformBufferStandardLayoutFeatures & operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures & setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
uniformBufferStandardLayout = uniformBufferStandardLayout_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceUniformBufferStandardLayoutFeatures*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceUniformBufferStandardLayoutFeatures*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, uniformBufferStandardLayout );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceUniformBufferStandardLayoutFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {};
|
|
|
|
};
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures ) == sizeof( VkPhysicalDeviceUniformBufferStandardLayoutFeatures ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures>::value, "PhysicalDeviceUniformBufferStandardLayoutFeatures is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures>
  {
    using Type = PhysicalDeviceUniformBufferStandardLayoutFeatures;
  };

  using PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = PhysicalDeviceUniformBufferStandardLayoutFeatures;
struct PhysicalDeviceVariablePointersFeatures
|
|
{
|
|
using NativeType = VkPhysicalDeviceVariablePointersFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVariablePointersFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures(VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: variablePointersStorageBuffer( variablePointersStorageBuffer_ ), variablePointers( variablePointers_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceVariablePointersFeatures( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceVariablePointersFeatures( *reinterpret_cast<PhysicalDeviceVariablePointersFeatures const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceVariablePointersFeatures & operator=( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceVariablePointersFeatures & operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
variablePointersStorageBuffer = variablePointersStorageBuffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
variablePointers = variablePointers_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceVariablePointersFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceVariablePointersFeatures*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceVariablePointersFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceVariablePointersFeatures*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, variablePointersStorageBuffer, variablePointers );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceVariablePointersFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer )
|
|
&& ( variablePointers == rhs.variablePointers );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVariablePointersFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {};
|
|
|
|
};
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures ) == sizeof( VkPhysicalDeviceVariablePointersFeatures ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures>::value, "PhysicalDeviceVariablePointersFeatures is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVariablePointersFeatures>
  {
    using Type = PhysicalDeviceVariablePointersFeatures;
  };

  using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures;
  using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures;
  using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures;
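  // Usage sketch (illustrative comment, not part of the generated registry output): the two flags
  // are independent; SPIR-V modules declaring the VariablePointers capability need
  // `variablePointers`, while VariablePointersStorageBuffer only needs the first flag.
  //
  //   vk::PhysicalDeviceVariablePointersFeatures vpFeatures( /*variablePointersStorageBuffer=*/VK_TRUE,
  //                                                          /*variablePointers=*/VK_TRUE );
  //   deviceCreateInfo.setPNext( &vpFeatures );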
  struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {}) VULKAN_HPP_NOEXCEPT
      : vertexAttributeInstanceRateDivisor( vertexAttributeInstanceRateDivisor_ ), vertexAttributeInstanceRateZeroDivisor( vertexAttributeInstanceRateZeroDivisor_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVertexAttributeDivisorFeaturesEXT( *reinterpret_cast<PhysicalDeviceVertexAttributeDivisorFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceVertexAttributeDivisorFeaturesEXT & operator=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVertexAttributeDivisorFeaturesEXT & operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateZeroDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT*>( this );
    }

    explicit operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, vertexAttributeInstanceRateDivisor, vertexAttributeInstanceRateZeroDivisor );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor )
          && ( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor );
#endif
    }

    bool operator!=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor = {};
    VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT>::value, "PhysicalDeviceVertexAttributeDivisorFeaturesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT>
  {
    using Type = PhysicalDeviceVertexAttributeDivisorFeaturesEXT;
  };

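  // Illustrative usage sketch (editorial addition, not generated from the registry): one
  // possible way to request the vertex attribute divisor features is to chain the struct
  // defined above into a DeviceCreateInfo pNext chain. The physicalDevice handle, queue
  // create infos, and extension enablement are assumed to exist elsewhere in the caller.
  //
  //   vk::PhysicalDeviceVertexAttributeDivisorFeaturesEXT divisorFeatures;
  //   divisorFeatures.setVertexAttributeInstanceRateDivisor( VK_TRUE )
  //                  .setVertexAttributeInstanceRateZeroDivisor( VK_TRUE );
  //
  //   vk::DeviceCreateInfo deviceCreateInfo;          // queue create infos etc. assumed to be set
  //   deviceCreateInfo.setPNext( &divisorFeatures );  // chain the feature struct
  //   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );
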
  struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT(uint32_t maxVertexAttribDivisor_ = {}) VULKAN_HPP_NOEXCEPT
      : maxVertexAttribDivisor( maxVertexAttribDivisor_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVertexAttributeDivisorPropertiesEXT( *reinterpret_cast<PhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>( &rhs );
      return *this;
    }

    explicit operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT*>( this );
    }

    explicit operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxVertexAttribDivisor );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor );
#endif
    }

    bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
    void * pNext = {};
    uint32_t maxVertexAttribDivisor = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT>::value, "PhysicalDeviceVertexAttributeDivisorPropertiesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT>
  {
    using Type = PhysicalDeviceVertexAttributeDivisorPropertiesEXT;
  };

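  // Illustrative usage sketch (editorial addition, not generated from the registry): the
  // maxVertexAttribDivisor limit above is typically read back through a
  // PhysicalDeviceProperties2 pNext chain; the physicalDevice handle is assumed to have
  // been obtained elsewhere.
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceVertexAttributeDivisorPropertiesEXT>();
  //   uint32_t maxDivisor =
  //     chain.get<vk::PhysicalDeviceVertexAttributeDivisorPropertiesEXT>().maxVertexAttribDivisor;
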
  struct PhysicalDeviceVertexInputDynamicStateFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexInputDynamicStateFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ = {}) VULKAN_HPP_NOEXCEPT
      : vertexInputDynamicState( vertexInputDynamicState_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexInputDynamicStateFeaturesEXT( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVertexInputDynamicStateFeaturesEXT( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVertexInputDynamicStateFeaturesEXT( *reinterpret_cast<PhysicalDeviceVertexInputDynamicStateFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceVertexInputDynamicStateFeaturesEXT & operator=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVertexInputDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT & setVertexInputDynamicState( VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexInputDynamicState = vertexInputDynamicState_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT*>( this );
    }

    explicit operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, vertexInputDynamicState );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( vertexInputDynamicState == rhs.vertexInputDynamicState );
#endif
    }

    bool operator!=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT ) == sizeof( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT>::value, "PhysicalDeviceVertexInputDynamicStateFeaturesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT>
  {
    using Type = PhysicalDeviceVertexInputDynamicStateFeaturesEXT;
  };

#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoProfileKHR
|
|
{
|
|
using NativeType = VkVideoProfileKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoProfileKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoProfileKHR(VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_ = VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eInvalid, VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_ = {}, VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_ = {}, VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: videoCodecOperation( videoCodecOperation_ ), chromaSubsampling( chromaSubsampling_ ), lumaBitDepth( lumaBitDepth_ ), chromaBitDepth( chromaBitDepth_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoProfileKHR( VideoProfileKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoProfileKHR( VkVideoProfileKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoProfileKHR( *reinterpret_cast<VideoProfileKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoProfileKHR & operator=( VideoProfileKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoProfileKHR & operator=( VkVideoProfileKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoProfileKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoProfileKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoProfileKHR & setVideoCodecOperation( VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
videoCodecOperation = videoCodecOperation_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoProfileKHR & setChromaSubsampling( VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
chromaSubsampling = chromaSubsampling_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoProfileKHR & setLumaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
lumaBitDepth = lumaBitDepth_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoProfileKHR & setChromaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
chromaBitDepth = chromaBitDepth_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoProfileKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoProfileKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoProfileKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoProfileKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR const &, VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR const &, VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR const &, VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, videoCodecOperation, chromaSubsampling, lumaBitDepth, chromaBitDepth );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoProfileKHR const & ) const = default;
|
|
#else
|
|
bool operator==( VideoProfileKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( videoCodecOperation == rhs.videoCodecOperation )
|
|
&& ( chromaSubsampling == rhs.chromaSubsampling )
|
|
&& ( lumaBitDepth == rhs.lumaBitDepth )
|
|
&& ( chromaBitDepth == rhs.chromaBitDepth );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoProfileKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoProfileKHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation = VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eInvalid;
|
|
VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling = {};
|
|
VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth = {};
|
|
VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoProfileKHR ) == sizeof( VkVideoProfileKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoProfileKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoProfileKHR>::value, "VideoProfileKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoProfileKHR>
|
|
{
|
|
using Type = VideoProfileKHR;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoProfilesKHR
|
|
{
|
|
using NativeType = VkVideoProfilesKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoProfilesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoProfilesKHR(uint32_t profileCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pProfiles_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: profileCount( profileCount_ ), pProfiles( pProfiles_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoProfilesKHR( VideoProfilesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoProfilesKHR( VkVideoProfilesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoProfilesKHR( *reinterpret_cast<VideoProfilesKHR const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
VideoProfilesKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoProfileKHR> const & profiles_ )
|
|
: profileCount( static_cast<uint32_t>( profiles_.size() ) ), pProfiles( profiles_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoProfilesKHR & operator=( VideoProfilesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoProfilesKHR & operator=( VkVideoProfilesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoProfilesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoProfilesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoProfilesKHR & setProfileCount( uint32_t profileCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
profileCount = profileCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoProfilesKHR & setPProfiles( const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pProfiles_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pProfiles = pProfiles_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
VideoProfilesKHR & setProfiles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoProfileKHR> const & profiles_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
profileCount = static_cast<uint32_t>( profiles_.size() );
|
|
pProfiles = profiles_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoProfilesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoProfilesKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoProfilesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoProfilesKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoProfileKHR * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, profileCount, pProfiles );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoProfilesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( VideoProfilesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( profileCount == rhs.profileCount )
|
|
&& ( pProfiles == rhs.pProfiles );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoProfilesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoProfilesKHR;
|
|
void * pNext = {};
|
|
uint32_t profileCount = {};
|
|
const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pProfiles = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoProfilesKHR ) == sizeof( VkVideoProfilesKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoProfilesKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoProfilesKHR>::value, "VideoProfilesKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoProfilesKHR>
|
|
{
|
|
using Type = VideoProfilesKHR;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct PhysicalDeviceVideoFormatInfoKHR
|
|
{
|
|
using NativeType = VkPhysicalDeviceVideoFormatInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoFormatInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoFormatInfoKHR(VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, const VULKAN_HPP_NAMESPACE::VideoProfilesKHR * pVideoProfiles_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: imageUsage( imageUsage_ ), pVideoProfiles( pVideoProfiles_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoFormatInfoKHR( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceVideoFormatInfoKHR( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceVideoFormatInfoKHR( *reinterpret_cast<PhysicalDeviceVideoFormatInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceVideoFormatInfoKHR & operator=( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceVideoFormatInfoKHR & operator=( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceVideoFormatInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceVideoFormatInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceVideoFormatInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &, const VULKAN_HPP_NAMESPACE::VideoProfilesKHR * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, imageUsage, pVideoProfiles );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceVideoFormatInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceVideoFormatInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( imageUsage == rhs.imageUsage )
|
|
&& ( pVideoProfiles == rhs.pVideoProfiles );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceVideoFormatInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoFormatInfoKHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {};
|
|
const VULKAN_HPP_NAMESPACE::VideoProfilesKHR * pVideoProfiles = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR ) == sizeof( VkPhysicalDeviceVideoFormatInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR>::value, "PhysicalDeviceVideoFormatInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceVideoFormatInfoKHR>
|
|
{
|
|
using Type = PhysicalDeviceVideoFormatInfoKHR;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
struct PhysicalDeviceVulkan11Features
|
|
{
|
|
using NativeType = VkPhysicalDeviceVulkan11Features;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Features;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features(VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}, VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: storageBuffer16BitAccess( storageBuffer16BitAccess_ ), uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ), storagePushConstant16( storagePushConstant16_ ), storageInputOutput16( storageInputOutput16_ ), multiview( multiview_ ), multiviewGeometryShader( multiviewGeometryShader_ ), multiviewTessellationShader( multiviewTessellationShader_ ), variablePointersStorageBuffer( variablePointersStorageBuffer_ ), variablePointers( variablePointers_ ), protectedMemory( protectedMemory_ ), samplerYcbcrConversion( samplerYcbcrConversion_ ), shaderDrawParameters( shaderDrawParameters_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceVulkan11Features( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceVulkan11Features( *reinterpret_cast<PhysicalDeviceVulkan11Features const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceVulkan11Features & operator=( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceVulkan11Features & operator=( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
storageBuffer16BitAccess = storageBuffer16BitAccess_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
storagePushConstant16 = storagePushConstant16_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
storageInputOutput16 = storageInputOutput16_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
multiview = multiview_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
multiviewGeometryShader = multiviewGeometryShader_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
multiviewTessellationShader = multiviewTessellationShader_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
variablePointersStorageBuffer = variablePointersStorageBuffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
variablePointers = variablePointers_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
protectedMemory = protectedMemory_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
samplerYcbcrConversion = samplerYcbcrConversion_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderDrawParameters = shaderDrawParameters_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceVulkan11Features const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceVulkan11Features*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceVulkan11Features &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceVulkan11Features*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, storageBuffer16BitAccess, uniformAndStorageBuffer16BitAccess, storagePushConstant16, storageInputOutput16, multiview, multiviewGeometryShader, multiviewTessellationShader, variablePointersStorageBuffer, variablePointers, protectedMemory, samplerYcbcrConversion, shaderDrawParameters );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceVulkan11Features const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess )
|
|
&& ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess )
|
|
&& ( storagePushConstant16 == rhs.storagePushConstant16 )
|
|
&& ( storageInputOutput16 == rhs.storageInputOutput16 )
|
|
&& ( multiview == rhs.multiview )
|
|
&& ( multiviewGeometryShader == rhs.multiviewGeometryShader )
|
|
&& ( multiviewTessellationShader == rhs.multiviewTessellationShader )
|
|
&& ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer )
|
|
&& ( variablePointers == rhs.variablePointers )
|
|
&& ( protectedMemory == rhs.protectedMemory )
|
|
&& ( samplerYcbcrConversion == rhs.samplerYcbcrConversion )
|
|
&& ( shaderDrawParameters == rhs.shaderDrawParameters );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Features;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 multiview = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features ) == sizeof( VkPhysicalDeviceVulkan11Features ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features>::value, "PhysicalDeviceVulkan11Features is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan11Features>
|
|
{
|
|
using Type = PhysicalDeviceVulkan11Features;
|
|
};
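  // Illustrative usage sketch (editorial addition, not generated from the registry): the
  // Vulkan 1.1 core features above are commonly queried by chaining this struct behind
  // PhysicalDeviceFeatures2; the physicalDevice handle is assumed to come from the
  // application's instance setup.
  //
  //   auto featureChain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                                   vk::PhysicalDeviceVulkan11Features>();
  //   vk::PhysicalDeviceVulkan11Features const & vulkan11 =
  //     featureChain.get<vk::PhysicalDeviceVulkan11Features>();
  //   bool hasMultiview = ( vulkan11.multiview == VK_TRUE );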
struct PhysicalDeviceVulkan11Properties
|
|
{
|
|
using NativeType = VkPhysicalDeviceVulkan11Properties;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Properties;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties(std::array<uint8_t,VK_UUID_SIZE> const & deviceUUID_ = {}, std::array<uint8_t,VK_UUID_SIZE> const & driverUUID_ = {}, std::array<uint8_t,VK_LUID_SIZE> const & deviceLUID_ = {}, uint32_t deviceNodeMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}, uint32_t subgroupSize_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages_ = {}, VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages_ = {}, VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes, uint32_t maxMultiviewViewCount_ = {}, uint32_t maxMultiviewInstanceIndex_ = {}, VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}, uint32_t maxPerSetDescriptors_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: deviceUUID( deviceUUID_ ), driverUUID( driverUUID_ ), deviceLUID( deviceLUID_ ), deviceNodeMask( deviceNodeMask_ ), deviceLUIDValid( deviceLUIDValid_ ), subgroupSize( subgroupSize_ ), subgroupSupportedStages( subgroupSupportedStages_ ), subgroupSupportedOperations( subgroupSupportedOperations_ ), subgroupQuadOperationsInAllStages( subgroupQuadOperationsInAllStages_ ), pointClippingBehavior( pointClippingBehavior_ ), maxMultiviewViewCount( maxMultiviewViewCount_ ), maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ ), protectedNoFault( protectedNoFault_ ), maxPerSetDescriptors( maxPerSetDescriptors_ ), maxMemoryAllocationSize( maxMemoryAllocationSize_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceVulkan11Properties( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceVulkan11Properties( *reinterpret_cast<PhysicalDeviceVulkan11Properties const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceVulkan11Properties & operator=( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceVulkan11Properties & operator=( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPhysicalDeviceVulkan11Properties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceVulkan11Properties*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceVulkan11Properties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceVulkan11Properties*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::PointClippingBehavior const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, deviceUUID, driverUUID, deviceLUID, deviceNodeMask, deviceLUIDValid, subgroupSize, subgroupSupportedStages, subgroupSupportedOperations, subgroupQuadOperationsInAllStages, pointClippingBehavior, maxMultiviewViewCount, maxMultiviewInstanceIndex, protectedNoFault, maxPerSetDescriptors, maxMemoryAllocationSize );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceVulkan11Properties const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( deviceUUID == rhs.deviceUUID )
|
|
&& ( driverUUID == rhs.driverUUID )
|
|
&& ( deviceLUID == rhs.deviceLUID )
|
|
&& ( deviceNodeMask == rhs.deviceNodeMask )
|
|
&& ( deviceLUIDValid == rhs.deviceLUIDValid )
|
|
&& ( subgroupSize == rhs.subgroupSize )
|
|
&& ( subgroupSupportedStages == rhs.subgroupSupportedStages )
|
|
&& ( subgroupSupportedOperations == rhs.subgroupSupportedOperations )
|
|
&& ( subgroupQuadOperationsInAllStages == rhs.subgroupQuadOperationsInAllStages )
|
|
&& ( pointClippingBehavior == rhs.pointClippingBehavior )
|
|
&& ( maxMultiviewViewCount == rhs.maxMultiviewViewCount )
|
|
&& ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex )
|
|
&& ( protectedNoFault == rhs.protectedNoFault )
|
|
&& ( maxPerSetDescriptors == rhs.maxPerSetDescriptors )
|
|
&& ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Properties;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> deviceUUID = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> driverUUID = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> deviceLUID = {};
|
|
uint32_t deviceNodeMask = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {};
|
|
uint32_t subgroupSize = {};
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages = {};
|
|
VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages = {};
|
|
VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes;
|
|
uint32_t maxMultiviewViewCount = {};
|
|
uint32_t maxMultiviewInstanceIndex = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {};
|
|
uint32_t maxPerSetDescriptors = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties ) == sizeof( VkPhysicalDeviceVulkan11Properties ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties>::value, "PhysicalDeviceVulkan11Properties is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan11Properties>
|
|
{
|
|
using Type = PhysicalDeviceVulkan11Properties;
|
|
};
struct PhysicalDeviceVulkan12Features
|
|
{
|
|
using NativeType = VkPhysicalDeviceVulkan12Features;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Features;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features(VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ = {}, VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}, VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: samplerMirrorClampToEdge( samplerMirrorClampToEdge_ ), drawIndirectCount( drawIndirectCount_ ), storageBuffer8BitAccess( storageBuffer8BitAccess_ ), uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ ), storagePushConstant8( storagePushConstant8_ ), shaderBufferInt64Atomics( shaderBufferInt64Atomics_ ), shaderSharedInt64Atomics( shaderSharedInt64Atomics_ ), shaderFloat16( shaderFloat16_ ), shaderInt8( shaderInt8_ ), descriptorIndexing( descriptorIndexing_ ), shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ ), shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ), shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ), shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ ), shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ ), shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ ), shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ ), shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ ), shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ ), shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ ), descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ ), descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ ), descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ ), descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ ), descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ ), descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ ), descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ ), descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ ), descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ ), runtimeDescriptorArray( runtimeDescriptorArray_ ), samplerFilterMinmax( samplerFilterMinmax_ ), scalarBlockLayout( scalarBlockLayout_ ), imagelessFramebuffer( imagelessFramebuffer_ ), uniformBufferStandardLayout( uniformBufferStandardLayout_ ), shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ ), separateDepthStencilLayouts( separateDepthStencilLayouts_ ), hostQueryReset( hostQueryReset_ ), timelineSemaphore( timelineSemaphore_ ), bufferDeviceAddress( bufferDeviceAddress_ ), bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ), bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ), vulkanMemoryModel( vulkanMemoryModel_ ), vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ ), vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ ), shaderOutputViewportIndex( shaderOutputViewportIndex_ ), shaderOutputLayer( shaderOutputLayer_ ), subgroupBroadcastDynamicId( subgroupBroadcastDynamicId_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceVulkan12Features( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceVulkan12Features( *reinterpret_cast<PhysicalDeviceVulkan12Features const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceVulkan12Features & operator=( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceVulkan12Features & operator=( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setSamplerMirrorClampToEdge( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
samplerMirrorClampToEdge = samplerMirrorClampToEdge_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDrawIndirectCount( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
drawIndirectCount = drawIndirectCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
storageBuffer8BitAccess = storageBuffer8BitAccess_;
|
|
return *this;
|
|
}
|
|
|
|
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
    {
      storagePushConstant8 = storagePushConstant8_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderFloat16 = shaderFloat16_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderInt8 = shaderInt8_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorIndexing( VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorIndexing = descriptorIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingUniformTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingStorageTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
    {
      runtimeDescriptorArray = runtimeDescriptorArray_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setSamplerFilterMinmax( VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ ) VULKAN_HPP_NOEXCEPT
    {
      samplerFilterMinmax = samplerFilterMinmax_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      scalarBlockLayout = scalarBlockLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      imagelessFramebuffer = imagelessFramebuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      uniformBufferStandardLayout = uniformBufferStandardLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      separateDepthStencilLayouts = separateDepthStencilLayouts_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
    {
      hostQueryReset = hostQueryReset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      timelineSemaphore = timelineSemaphore_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferDeviceAddress = bufferDeviceAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT
    {
      vulkanMemoryModel = vulkanMemoryModel_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
    {
      vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
    {
      vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderOutputViewportIndex( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderOutputViewportIndex = shaderOutputViewportIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderOutputLayer( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderOutputLayer = shaderOutputLayer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setSubgroupBroadcastDynamicId( VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ ) VULKAN_HPP_NOEXCEPT
    {
      subgroupBroadcastDynamicId = subgroupBroadcastDynamicId_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceVulkan12Features const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVulkan12Features*>( this );
    }

    explicit operator VkPhysicalDeviceVulkan12Features &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVulkan12Features*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
return std::tie( sType, pNext, samplerMirrorClampToEdge, drawIndirectCount, storageBuffer8BitAccess, uniformAndStorageBuffer8BitAccess, storagePushConstant8, shaderBufferInt64Atomics, shaderSharedInt64Atomics, shaderFloat16, shaderInt8, descriptorIndexing, shaderInputAttachmentArrayDynamicIndexing, shaderUniformTexelBufferArrayDynamicIndexing, shaderStorageTexelBufferArrayDynamicIndexing, shaderUniformBufferArrayNonUniformIndexing, shaderSampledImageArrayNonUniformIndexing, shaderStorageBufferArrayNonUniformIndexing, shaderStorageImageArrayNonUniformIndexing, shaderInputAttachmentArrayNonUniformIndexing, shaderUniformTexelBufferArrayNonUniformIndexing, shaderStorageTexelBufferArrayNonUniformIndexing, descriptorBindingUniformBufferUpdateAfterBind, descriptorBindingSampledImageUpdateAfterBind, descriptorBindingStorageImageUpdateAfterBind, descriptorBindingStorageBufferUpdateAfterBind, descriptorBindingUniformTexelBufferUpdateAfterBind, descriptorBindingStorageTexelBufferUpdateAfterBind, descriptorBindingUpdateUnusedWhilePending, descriptorBindingPartiallyBound, descriptorBindingVariableDescriptorCount, runtimeDescriptorArray, samplerFilterMinmax, scalarBlockLayout, imagelessFramebuffer, uniformBufferStandardLayout, shaderSubgroupExtendedTypes, separateDepthStencilLayouts, hostQueryReset, timelineSemaphore, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice, vulkanMemoryModel, vulkanMemoryModelDeviceScope, vulkanMemoryModelAvailabilityVisibilityChains, shaderOutputViewportIndex, shaderOutputLayer, subgroupBroadcastDynamicId );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceVulkan12Features const & ) const = default;
#else
    bool operator==( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( samplerMirrorClampToEdge == rhs.samplerMirrorClampToEdge )
          && ( drawIndirectCount == rhs.drawIndirectCount )
          && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess )
          && ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess )
          && ( storagePushConstant8 == rhs.storagePushConstant8 )
          && ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics )
          && ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics )
          && ( shaderFloat16 == rhs.shaderFloat16 )
          && ( shaderInt8 == rhs.shaderInt8 )
          && ( descriptorIndexing == rhs.descriptorIndexing )
          && ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing )
          && ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing )
          && ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing )
          && ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing )
          && ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing )
          && ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing )
          && ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing )
          && ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing )
          && ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing )
          && ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing )
          && ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind )
          && ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind )
          && ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind )
          && ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind )
          && ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind )
          && ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind )
          && ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending )
          && ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound )
          && ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount )
          && ( runtimeDescriptorArray == rhs.runtimeDescriptorArray )
          && ( samplerFilterMinmax == rhs.samplerFilterMinmax )
          && ( scalarBlockLayout == rhs.scalarBlockLayout )
          && ( imagelessFramebuffer == rhs.imagelessFramebuffer )
          && ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout )
          && ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes )
          && ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts )
          && ( hostQueryReset == rhs.hostQueryReset )
          && ( timelineSemaphore == rhs.timelineSemaphore )
          && ( bufferDeviceAddress == rhs.bufferDeviceAddress )
          && ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay )
          && ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice )
          && ( vulkanMemoryModel == rhs.vulkanMemoryModel )
          && ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope )
          && ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains )
          && ( shaderOutputViewportIndex == rhs.shaderOutputViewportIndex )
          && ( shaderOutputLayer == rhs.shaderOutputLayer )
          && ( subgroupBroadcastDynamicId == rhs.subgroupBroadcastDynamicId );
#endif
    }

    bool operator!=( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Features;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge = {};
    VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount = {};
    VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {};
    VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {};
    VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {};
    VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {};
    VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax = {};
    VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {};
    VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {};
    VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {};
    VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {};
    VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {};
    VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {};
    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {};
    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {};
    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer = {};
    VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId = {};

  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features ) == sizeof( VkPhysicalDeviceVulkan12Features ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features>::value, "PhysicalDeviceVulkan12Features is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Features>
  {
    using Type = PhysicalDeviceVulkan12Features;
};
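
  // Editorial note (illustrative only, not generated from the Vulkan XML registry):
  // PhysicalDeviceVulkan12Features is normally chained into vk::DeviceCreateInfo via pNext to
  // request Vulkan 1.2 core features at device creation, and its fluent setters can be chained.
  // A minimal sketch, assuming a valid vk::PhysicalDevice physicalDevice and a populated
  // vk::DeviceQueueCreateInfo queueCreateInfo exist elsewhere (both are assumptions here):
  //
  //   vk::PhysicalDeviceVulkan12Features vulkan12Features;
  //   vulkan12Features.setDescriptorIndexing( VK_TRUE ).setBufferDeviceAddress( VK_TRUE ).setTimelineSemaphore( VK_TRUE );
  //   vk::DeviceCreateInfo deviceCreateInfo;
  //   deviceCreateInfo.setQueueCreateInfos( queueCreateInfo ).setPNext( &vulkan12Features );
  //   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );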
  struct PhysicalDeviceVulkan12Properties
  {
    using NativeType = VkPhysicalDeviceVulkan12Properties;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Properties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties(VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, std::array<char,VK_MAX_DRIVER_NAME_SIZE> const & driverName_ = {}, std::array<char,VK_MAX_DRIVER_INFO_SIZE> const & driverInfo_ = {}, VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}, uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, uint32_t maxPerStageUpdateAfterBindResources_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, 
VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, uint64_t maxTimelineSemaphoreValueDifference_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {}) VULKAN_HPP_NOEXCEPT
: driverID( driverID_ ), driverName( driverName_ ), driverInfo( driverInfo_ ), conformanceVersion( conformanceVersion_ ), denormBehaviorIndependence( denormBehaviorIndependence_ ), roundingModeIndependence( roundingModeIndependence_ ), shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ), shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ), shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ), shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ), shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ), shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ), shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ), shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ), shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ), shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ), shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ), shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ), shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ), shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ), shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ ), maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ), shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ), shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ), shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ), shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ), shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ), robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ), quadDivergentImplicitLod( quadDivergentImplicitLod_ ), maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ ), maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ), maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ), maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ), maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ), maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ ), maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ), maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ), maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ), maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ), maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ), maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ), maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ), maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ), maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ ), supportedDepthResolveModes( supportedDepthResolveModes_ ), 
supportedStencilResolveModes( supportedStencilResolveModes_ ), independentResolveNone( independentResolveNone_ ), independentResolve( independentResolve_ ), filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ ), filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ ), maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ ), framebufferIntegerColorSampleCounts( framebufferIntegerColorSampleCounts_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVulkan12Properties( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVulkan12Properties( *reinterpret_cast<PhysicalDeviceVulkan12Properties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceVulkan12Properties & operator=( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVulkan12Properties & operator=( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties const *>( &rhs );
      return *this;
    }

    explicit operator VkPhysicalDeviceVulkan12Properties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVulkan12Properties*>( this );
    }

    explicit operator VkPhysicalDeviceVulkan12Properties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVulkan12Properties*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DriverId const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> const &, VULKAN_HPP_NAMESPACE::ConformanceVersion const &, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ResolveModeFlags const &, VULKAN_HPP_NAMESPACE::ResolveModeFlags const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint64_t const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
return std::tie( sType, pNext, driverID, driverName, driverInfo, conformanceVersion, denormBehaviorIndependence, roundingModeIndependence, shaderSignedZeroInfNanPreserveFloat16, shaderSignedZeroInfNanPreserveFloat32, shaderSignedZeroInfNanPreserveFloat64, shaderDenormPreserveFloat16, shaderDenormPreserveFloat32, shaderDenormPreserveFloat64, shaderDenormFlushToZeroFloat16, shaderDenormFlushToZeroFloat32, shaderDenormFlushToZeroFloat64, shaderRoundingModeRTEFloat16, shaderRoundingModeRTEFloat32, shaderRoundingModeRTEFloat64, shaderRoundingModeRTZFloat16, shaderRoundingModeRTZFloat32, shaderRoundingModeRTZFloat64, maxUpdateAfterBindDescriptorsInAllPools, shaderUniformBufferArrayNonUniformIndexingNative, shaderSampledImageArrayNonUniformIndexingNative, shaderStorageBufferArrayNonUniformIndexingNative, shaderStorageImageArrayNonUniformIndexingNative, shaderInputAttachmentArrayNonUniformIndexingNative, robustBufferAccessUpdateAfterBind, quadDivergentImplicitLod, maxPerStageDescriptorUpdateAfterBindSamplers, maxPerStageDescriptorUpdateAfterBindUniformBuffers, maxPerStageDescriptorUpdateAfterBindStorageBuffers, maxPerStageDescriptorUpdateAfterBindSampledImages, maxPerStageDescriptorUpdateAfterBindStorageImages, maxPerStageDescriptorUpdateAfterBindInputAttachments, maxPerStageUpdateAfterBindResources, maxDescriptorSetUpdateAfterBindSamplers, maxDescriptorSetUpdateAfterBindUniformBuffers, maxDescriptorSetUpdateAfterBindUniformBuffersDynamic, maxDescriptorSetUpdateAfterBindStorageBuffers, maxDescriptorSetUpdateAfterBindStorageBuffersDynamic, maxDescriptorSetUpdateAfterBindSampledImages, maxDescriptorSetUpdateAfterBindStorageImages, maxDescriptorSetUpdateAfterBindInputAttachments, supportedDepthResolveModes, supportedStencilResolveModes, independentResolveNone, independentResolve, filterMinmaxSingleComponentFormats, filterMinmaxImageComponentMapping, maxTimelineSemaphoreValueDifference, framebufferIntegerColorSampleCounts );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceVulkan12Properties const & ) const = default;
#else
    bool operator==( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( driverID == rhs.driverID )
          && ( driverName == rhs.driverName )
          && ( driverInfo == rhs.driverInfo )
          && ( conformanceVersion == rhs.conformanceVersion )
          && ( denormBehaviorIndependence == rhs.denormBehaviorIndependence )
          && ( roundingModeIndependence == rhs.roundingModeIndependence )
          && ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 )
          && ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 )
          && ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 )
          && ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 )
          && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 )
          && ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 )
          && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 )
          && ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 )
          && ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 )
          && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 )
          && ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 )
          && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 )
          && ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 )
          && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 )
          && ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 )
          && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools )
          && ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative )
          && ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative )
          && ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative )
          && ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative )
          && ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative )
          && ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind )
          && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod )
          && ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers )
          && ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers )
          && ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers )
          && ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages )
          && ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages )
          && ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments )
          && ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources )
          && ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers )
          && ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers )
          && ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic )
          && ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers )
          && ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic )
          && ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages )
          && ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages )
          && ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments )
          && ( supportedDepthResolveModes == rhs.supportedDepthResolveModes )
          && ( supportedStencilResolveModes == rhs.supportedStencilResolveModes )
          && ( independentResolveNone == rhs.independentResolveNone )
          && ( independentResolve == rhs.independentResolve )
          && ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats )
          && ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping )
          && ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference )
          && ( framebufferIntegerColorSampleCounts == rhs.framebufferIntegerColorSampleCounts );
#endif
    }

    bool operator!=( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Properties;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> driverName = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> driverInfo = {};
    VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {};
    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {};
    uint32_t maxUpdateAfterBindDescriptorsInAllPools = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {};
    VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {};
    uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {};
    uint32_t maxPerStageUpdateAfterBindResources = {};
    uint32_t maxDescriptorSetUpdateAfterBindSamplers = {};
    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {};
    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {};
    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {};
    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {};
    uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {};
    uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {};
    uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {};
    VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {};
    VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {};
    VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {};
    VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {};
    VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {};
    VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {};
    uint64_t maxTimelineSemaphoreValueDifference = {};
    VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts = {};

  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties ) == sizeof( VkPhysicalDeviceVulkan12Properties ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties>::value, "PhysicalDeviceVulkan12Properties is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Properties>
  {
    using Type = PhysicalDeviceVulkan12Properties;
};
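
  // Editorial note (illustrative only, not generated from the Vulkan XML registry):
  // PhysicalDeviceVulkan12Properties is an output structure; it is filled in by chaining it into
  // vk::PhysicalDeviceProperties2. A minimal sketch, assuming a valid vk::PhysicalDevice
  // physicalDevice (an assumption here):
  //
  //   vk::PhysicalDeviceVulkan12Properties vulkan12Properties;
  //   vk::PhysicalDeviceProperties2 properties2;
  //   properties2.pNext = &vulkan12Properties;
  //   physicalDevice.getProperties2( &properties2 );
  //   // vulkan12Properties.driverName, .conformanceVersion, .maxTimelineSemaphoreValueDifference, etc. are now populated.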
  struct PhysicalDeviceVulkan13Features
  {
    using NativeType = VkPhysicalDeviceVulkan13Features;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan13Features;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Features(VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {}, VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {}, VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {}, VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ = {}, VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ = {}) VULKAN_HPP_NOEXCEPT
: robustImageAccess( robustImageAccess_ ), inlineUniformBlock( inlineUniformBlock_ ), descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ ), pipelineCreationCacheControl( pipelineCreationCacheControl_ ), privateData( privateData_ ), shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ ), shaderTerminateInvocation( shaderTerminateInvocation_ ), subgroupSizeControl( subgroupSizeControl_ ), computeFullSubgroups( computeFullSubgroups_ ), synchronization2( synchronization2_ ), textureCompressionASTC_HDR( textureCompressionASTC_HDR_ ), shaderZeroInitializeWorkgroupMemory( shaderZeroInitializeWorkgroupMemory_ ), dynamicRendering( dynamicRendering_ ), shaderIntegerDotProduct( shaderIntegerDotProduct_ ), maintenance4( maintenance4_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Features( PhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVulkan13Features( VkPhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVulkan13Features( *reinterpret_cast<PhysicalDeviceVulkan13Features const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceVulkan13Features & operator=( PhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVulkan13Features & operator=( VkPhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      robustImageAccess = robustImageAccess_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT
    {
      inlineUniformBlock = inlineUniformBlock_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setDescriptorBindingInlineUniformBlockUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineCreationCacheControl = pipelineCreationCacheControl_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT
    {
      privateData = privateData_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setShaderDemoteToHelperInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setShaderTerminateInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderTerminateInvocation = shaderTerminateInvocation_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT
    {
      subgroupSizeControl = subgroupSizeControl_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT
    {
      computeFullSubgroups = computeFullSubgroups_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setSynchronization2( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT
    {
      synchronization2 = synchronization2_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT
    {
      textureCompressionASTC_HDR = textureCompressionASTC_HDR_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setShaderZeroInitializeWorkgroupMemory( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderZeroInitializeWorkgroupMemory = shaderZeroInitializeWorkgroupMemory_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setDynamicRendering( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ ) VULKAN_HPP_NOEXCEPT
    {
      dynamicRendering = dynamicRendering_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setShaderIntegerDotProduct( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderIntegerDotProduct = shaderIntegerDotProduct_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setMaintenance4( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ ) VULKAN_HPP_NOEXCEPT
    {
      maintenance4 = maintenance4_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceVulkan13Features const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVulkan13Features*>( this );
    }

    explicit operator VkPhysicalDeviceVulkan13Features &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVulkan13Features*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
return std::tie( sType, pNext, robustImageAccess, inlineUniformBlock, descriptorBindingInlineUniformBlockUpdateAfterBind, pipelineCreationCacheControl, privateData, shaderDemoteToHelperInvocation, shaderTerminateInvocation, subgroupSizeControl, computeFullSubgroups, synchronization2, textureCompressionASTC_HDR, shaderZeroInitializeWorkgroupMemory, dynamicRendering, shaderIntegerDotProduct, maintenance4 );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceVulkan13Features const & ) const = default;
#else
    bool operator==( PhysicalDeviceVulkan13Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( robustImageAccess == rhs.robustImageAccess )
          && ( inlineUniformBlock == rhs.inlineUniformBlock )
          && ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind )
          && ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl )
          && ( privateData == rhs.privateData )
          && ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation )
          && ( shaderTerminateInvocation == rhs.shaderTerminateInvocation )
          && ( subgroupSizeControl == rhs.subgroupSizeControl )
          && ( computeFullSubgroups == rhs.computeFullSubgroups )
          && ( synchronization2 == rhs.synchronization2 )
          && ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR )
          && ( shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory )
          && ( dynamicRendering == rhs.dynamicRendering )
          && ( shaderIntegerDotProduct == rhs.shaderIntegerDotProduct )
          && ( maintenance4 == rhs.maintenance4 );
#endif
    }

    bool operator!=( PhysicalDeviceVulkan13Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan13Features;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {};
    VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {};
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {};
    VULKAN_HPP_NAMESPACE::Bool32 privateData = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation = {};
    VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {};
    VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {};
    VULKAN_HPP_NAMESPACE::Bool32 synchronization2 = {};
    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory = {};
    VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct = {};
    VULKAN_HPP_NAMESPACE::Bool32 maintenance4 = {};

  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features ) == sizeof( VkPhysicalDeviceVulkan13Features ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features>::value, "PhysicalDeviceVulkan13Features is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan13Features>
  {
    using Type = PhysicalDeviceVulkan13Features;
};
|
|
|
|
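  // Usage sketch (not part of the generated header): querying the Vulkan 1.3 feature set above
  // through a pNext chain. Assumes the default `vk` namespace alias and an already enumerated
  // vk::PhysicalDevice named `physicalDevice`.
  //
  //   auto chain      = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVulkan13Features>();
  //   auto features13 = chain.get<vk::PhysicalDeviceVulkan13Features>();
  //   if ( features13.dynamicRendering && features13.synchronization2 )
  //   {
  //     // both features may be requested at device creation
  //   }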
struct PhysicalDeviceVulkan13Properties
  {
    using NativeType = VkPhysicalDeviceVulkan13Properties;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan13Properties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Properties(uint32_t minSubgroupSize_ = {}, uint32_t maxSubgroupSize_ = {}, uint32_t maxComputeWorkgroupSubgroups_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {}, uint32_t maxInlineUniformBlockSize_ = {}, uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {}, uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {}, uint32_t maxInlineUniformTotalSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize 
uniformTexelBufferOffsetAlignmentBytes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize_ = {}) VULKAN_HPP_NOEXCEPT
: minSubgroupSize( minSubgroupSize_ ), maxSubgroupSize( maxSubgroupSize_ ), maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ ), requiredSubgroupSizeStages( requiredSubgroupSizeStages_ ), maxInlineUniformBlockSize( maxInlineUniformBlockSize_ ), maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ ), maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ ), maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ ), maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ ), maxInlineUniformTotalSize( maxInlineUniformTotalSize_ ), integerDotProduct8BitUnsignedAccelerated( integerDotProduct8BitUnsignedAccelerated_ ), integerDotProduct8BitSignedAccelerated( integerDotProduct8BitSignedAccelerated_ ), integerDotProduct8BitMixedSignednessAccelerated( integerDotProduct8BitMixedSignednessAccelerated_ ), integerDotProduct4x8BitPackedUnsignedAccelerated( integerDotProduct4x8BitPackedUnsignedAccelerated_ ), integerDotProduct4x8BitPackedSignedAccelerated( integerDotProduct4x8BitPackedSignedAccelerated_ ), integerDotProduct4x8BitPackedMixedSignednessAccelerated( integerDotProduct4x8BitPackedMixedSignednessAccelerated_ ), integerDotProduct16BitUnsignedAccelerated( integerDotProduct16BitUnsignedAccelerated_ ), integerDotProduct16BitSignedAccelerated( integerDotProduct16BitSignedAccelerated_ ), integerDotProduct16BitMixedSignednessAccelerated( integerDotProduct16BitMixedSignednessAccelerated_ ), integerDotProduct32BitUnsignedAccelerated( integerDotProduct32BitUnsignedAccelerated_ ), integerDotProduct32BitSignedAccelerated( integerDotProduct32BitSignedAccelerated_ ), integerDotProduct32BitMixedSignednessAccelerated( integerDotProduct32BitMixedSignednessAccelerated_ ), integerDotProduct64BitUnsignedAccelerated( integerDotProduct64BitUnsignedAccelerated_ ), integerDotProduct64BitSignedAccelerated( integerDotProduct64BitSignedAccelerated_ ), integerDotProduct64BitMixedSignednessAccelerated( integerDotProduct64BitMixedSignednessAccelerated_ ), integerDotProductAccumulatingSaturating8BitUnsignedAccelerated( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ ), integerDotProductAccumulatingSaturating8BitSignedAccelerated( integerDotProductAccumulatingSaturating8BitSignedAccelerated_ ), integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ ), integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ ), integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ ), integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ ), integerDotProductAccumulatingSaturating16BitUnsignedAccelerated( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ ), integerDotProductAccumulatingSaturating16BitSignedAccelerated( integerDotProductAccumulatingSaturating16BitSignedAccelerated_ ), integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ ), integerDotProductAccumulatingSaturating32BitUnsignedAccelerated( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ ), 
integerDotProductAccumulatingSaturating32BitSignedAccelerated( integerDotProductAccumulatingSaturating32BitSignedAccelerated_ ), integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ ), integerDotProductAccumulatingSaturating64BitUnsignedAccelerated( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ ), integerDotProductAccumulatingSaturating64BitSignedAccelerated( integerDotProductAccumulatingSaturating64BitSignedAccelerated_ ), integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ ), storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ ), storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ ), uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ ), uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ ), maxBufferSize( maxBufferSize_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Properties( PhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVulkan13Properties( VkPhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVulkan13Properties( *reinterpret_cast<PhysicalDeviceVulkan13Properties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceVulkan13Properties & operator=( PhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVulkan13Properties & operator=( VkPhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties const *>( &rhs );
      return *this;
    }

    explicit operator VkPhysicalDeviceVulkan13Properties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVulkan13Properties*>( this );
    }

    explicit operator VkPhysicalDeviceVulkan13Properties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVulkan13Properties*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
return std::tie( sType, pNext, minSubgroupSize, maxSubgroupSize, maxComputeWorkgroupSubgroups, requiredSubgroupSizeStages, maxInlineUniformBlockSize, maxPerStageDescriptorInlineUniformBlocks, maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks, maxDescriptorSetInlineUniformBlocks, maxDescriptorSetUpdateAfterBindInlineUniformBlocks, maxInlineUniformTotalSize, integerDotProduct8BitUnsignedAccelerated, integerDotProduct8BitSignedAccelerated, integerDotProduct8BitMixedSignednessAccelerated, integerDotProduct4x8BitPackedUnsignedAccelerated, integerDotProduct4x8BitPackedSignedAccelerated, integerDotProduct4x8BitPackedMixedSignednessAccelerated, integerDotProduct16BitUnsignedAccelerated, integerDotProduct16BitSignedAccelerated, integerDotProduct16BitMixedSignednessAccelerated, integerDotProduct32BitUnsignedAccelerated, integerDotProduct32BitSignedAccelerated, integerDotProduct32BitMixedSignednessAccelerated, integerDotProduct64BitUnsignedAccelerated, integerDotProduct64BitSignedAccelerated, integerDotProduct64BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating8BitUnsignedAccelerated, integerDotProductAccumulatingSaturating8BitSignedAccelerated, integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated, integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated, integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated, integerDotProductAccumulatingSaturating16BitUnsignedAccelerated, integerDotProductAccumulatingSaturating16BitSignedAccelerated, integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating32BitUnsignedAccelerated, integerDotProductAccumulatingSaturating32BitSignedAccelerated, integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating64BitUnsignedAccelerated, integerDotProductAccumulatingSaturating64BitSignedAccelerated, integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated, storageTexelBufferOffsetAlignmentBytes, storageTexelBufferOffsetSingleTexelAlignment, uniformTexelBufferOffsetAlignmentBytes, uniformTexelBufferOffsetSingleTexelAlignment, maxBufferSize );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceVulkan13Properties const & ) const = default;
#else
    bool operator==( PhysicalDeviceVulkan13Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( minSubgroupSize == rhs.minSubgroupSize )
|
|
&& ( maxSubgroupSize == rhs.maxSubgroupSize )
|
|
&& ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups )
|
|
&& ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages )
|
|
&& ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize )
|
|
&& ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks )
|
|
&& ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks )
|
|
&& ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks )
|
|
&& ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks )
|
|
&& ( maxInlineUniformTotalSize == rhs.maxInlineUniformTotalSize )
|
|
&& ( integerDotProduct8BitUnsignedAccelerated == rhs.integerDotProduct8BitUnsignedAccelerated )
|
|
&& ( integerDotProduct8BitSignedAccelerated == rhs.integerDotProduct8BitSignedAccelerated )
|
|
&& ( integerDotProduct8BitMixedSignednessAccelerated == rhs.integerDotProduct8BitMixedSignednessAccelerated )
|
|
&& ( integerDotProduct4x8BitPackedUnsignedAccelerated == rhs.integerDotProduct4x8BitPackedUnsignedAccelerated )
|
|
&& ( integerDotProduct4x8BitPackedSignedAccelerated == rhs.integerDotProduct4x8BitPackedSignedAccelerated )
|
|
&& ( integerDotProduct4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProduct4x8BitPackedMixedSignednessAccelerated )
|
|
&& ( integerDotProduct16BitUnsignedAccelerated == rhs.integerDotProduct16BitUnsignedAccelerated )
|
|
&& ( integerDotProduct16BitSignedAccelerated == rhs.integerDotProduct16BitSignedAccelerated )
|
|
&& ( integerDotProduct16BitMixedSignednessAccelerated == rhs.integerDotProduct16BitMixedSignednessAccelerated )
|
|
&& ( integerDotProduct32BitUnsignedAccelerated == rhs.integerDotProduct32BitUnsignedAccelerated )
|
|
&& ( integerDotProduct32BitSignedAccelerated == rhs.integerDotProduct32BitSignedAccelerated )
|
|
&& ( integerDotProduct32BitMixedSignednessAccelerated == rhs.integerDotProduct32BitMixedSignednessAccelerated )
|
|
&& ( integerDotProduct64BitUnsignedAccelerated == rhs.integerDotProduct64BitUnsignedAccelerated )
|
|
&& ( integerDotProduct64BitSignedAccelerated == rhs.integerDotProduct64BitSignedAccelerated )
|
|
&& ( integerDotProduct64BitMixedSignednessAccelerated == rhs.integerDotProduct64BitMixedSignednessAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating8BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitSignedAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated == rhs.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating16BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitSignedAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating32BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitSignedAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating64BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitSignedAccelerated )
|
|
&& ( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated )
|
|
&& ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes )
|
|
&& ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment )
|
|
&& ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes )
|
|
&& ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment )
|
|
&& ( maxBufferSize == rhs.maxBufferSize );
#endif
    }

    bool operator!=( PhysicalDeviceVulkan13Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan13Properties;
|
|
void * pNext = {};
|
|
uint32_t minSubgroupSize = {};
|
|
uint32_t maxSubgroupSize = {};
|
|
uint32_t maxComputeWorkgroupSubgroups = {};
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {};
|
|
uint32_t maxInlineUniformBlockSize = {};
|
|
uint32_t maxPerStageDescriptorInlineUniformBlocks = {};
|
|
uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {};
|
|
uint32_t maxDescriptorSetInlineUniformBlocks = {};
|
|
uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {};
|
|
uint32_t maxInlineUniformTotalSize = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties ) == sizeof( VkPhysicalDeviceVulkan13Properties ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties>::value, "PhysicalDeviceVulkan13Properties is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan13Properties>
  {
    using Type = PhysicalDeviceVulkan13Properties;
  };
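  // Usage sketch (not part of the generated header): reading the Vulkan 1.3 limits above via a
  // properties chain. Assumes the default `vk` namespace alias and a vk::PhysicalDevice named
  // `physicalDevice`.
  //
  //   auto chain           = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2, vk::PhysicalDeviceVulkan13Properties>();
  //   auto const & props13 = chain.get<vk::PhysicalDeviceVulkan13Properties>();
  //   vk::DeviceSize maxBufferSize = props13.maxBufferSize;   // e.g. clamp buffer allocations to this limit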

  struct PhysicalDeviceVulkanMemoryModelFeatures
  {
    using NativeType = VkPhysicalDeviceVulkanMemoryModelFeatures;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures(VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}) VULKAN_HPP_NOEXCEPT
      : vulkanMemoryModel( vulkanMemoryModel_ ), vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ ), vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVulkanMemoryModelFeatures( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVulkanMemoryModelFeatures( *reinterpret_cast<PhysicalDeviceVulkanMemoryModelFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceVulkanMemoryModelFeatures & operator=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVulkanMemoryModelFeatures & operator=( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT
    {
      vulkanMemoryModel = vulkanMemoryModel_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
    {
      vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
    {
      vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceVulkanMemoryModelFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVulkanMemoryModelFeatures*>( this );
    }

    explicit operator VkPhysicalDeviceVulkanMemoryModelFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeatures*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, vulkanMemoryModel, vulkanMemoryModelDeviceScope, vulkanMemoryModelAvailabilityVisibilityChains );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceVulkanMemoryModelFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( vulkanMemoryModel == rhs.vulkanMemoryModel )
          && ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope )
          && ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains );
#endif
    }

    bool operator!=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {};
    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {};
    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures ) == sizeof( VkPhysicalDeviceVulkanMemoryModelFeatures ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures>::value, "PhysicalDeviceVulkanMemoryModelFeatures is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkanMemoryModelFeatures>
  {
    using Type = PhysicalDeviceVulkanMemoryModelFeatures;
  };
  using PhysicalDeviceVulkanMemoryModelFeaturesKHR = PhysicalDeviceVulkanMemoryModelFeatures;

struct PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR
|
|
{
|
|
using NativeType = VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ = {}, VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ = {}, VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: workgroupMemoryExplicitLayout( workgroupMemoryExplicitLayout_ ), workgroupMemoryExplicitLayoutScalarBlockLayout( workgroupMemoryExplicitLayoutScalarBlockLayout_ ), workgroupMemoryExplicitLayout8BitAccess( workgroupMemoryExplicitLayout8BitAccess_ ), workgroupMemoryExplicitLayout16BitAccess( workgroupMemoryExplicitLayout16BitAccess_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( *reinterpret_cast<PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & operator=( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & operator=( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayout( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
workgroupMemoryExplicitLayout = workgroupMemoryExplicitLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayoutScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
workgroupMemoryExplicitLayoutScalarBlockLayout = workgroupMemoryExplicitLayoutScalarBlockLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayout8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
workgroupMemoryExplicitLayout8BitAccess = workgroupMemoryExplicitLayout8BitAccess_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayout16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
workgroupMemoryExplicitLayout16BitAccess = workgroupMemoryExplicitLayout16BitAccess_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, workgroupMemoryExplicitLayout, workgroupMemoryExplicitLayoutScalarBlockLayout, workgroupMemoryExplicitLayout8BitAccess, workgroupMemoryExplicitLayout16BitAccess );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( workgroupMemoryExplicitLayout == rhs.workgroupMemoryExplicitLayout )
|
|
&& ( workgroupMemoryExplicitLayoutScalarBlockLayout == rhs.workgroupMemoryExplicitLayoutScalarBlockLayout )
|
|
&& ( workgroupMemoryExplicitLayout8BitAccess == rhs.workgroupMemoryExplicitLayout8BitAccess )
|
|
&& ( workgroupMemoryExplicitLayout16BitAccess == rhs.workgroupMemoryExplicitLayout16BitAccess );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR ) == sizeof( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>::value, "PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>
  {
    using Type = PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
  };

struct PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: ycbcr2plane444Formats( ycbcr2plane444Formats_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( *reinterpret_cast<PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & operator=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & operator=( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & setYcbcr2plane444Formats( VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
ycbcr2plane444Formats = ycbcr2plane444Formats_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, ycbcr2plane444Formats );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( ycbcr2plane444Formats == rhs.ycbcr2plane444Formats );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT ) == sizeof( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>::value, "PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>
  {
    using Type = PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
  };

struct PhysicalDeviceYcbcrImageArraysFeaturesEXT
|
|
{
|
|
using NativeType = VkPhysicalDeviceYcbcrImageArraysFeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: ycbcrImageArrays( ycbcrImageArrays_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceYcbcrImageArraysFeaturesEXT( *reinterpret_cast<PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrImageArraysFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrImageArraysFeaturesEXT & setYcbcrImageArrays( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
ycbcrImageArrays = ycbcrImageArrays_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, ycbcrImageArrays );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( ycbcrImageArrays == rhs.ycbcrImageArrays );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT ) == sizeof( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT>::value, "PhysicalDeviceYcbcrImageArraysFeaturesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT>
  {
    using Type = PhysicalDeviceYcbcrImageArraysFeaturesEXT;
  };

struct PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures
|
|
{
|
|
using NativeType = VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: shaderZeroInitializeWorkgroupMemory( shaderZeroInitializeWorkgroupMemory_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( *reinterpret_cast<PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & operator=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & operator=( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & setShaderZeroInitializeWorkgroupMemory( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shaderZeroInitializeWorkgroupMemory = shaderZeroInitializeWorkgroupMemory_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures*>( this );
|
|
}
|
|
|
|
explicit operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, shaderZeroInitializeWorkgroupMemory );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & ) const = default;
|
|
#else
|
|
bool operator==( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures ) == sizeof( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures>::value, "PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures>
  {
    using Type = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
  };
  using PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;

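  // Usage sketch (not part of the generated header): enabling one of the feature structs above at
  // device creation by chaining it into vk::DeviceCreateInfo. `queueCreateInfo` and `physicalDevice`
  // are assumed to exist already; the default `vk` namespace alias is assumed.
  //
  //   vk::StructureChain<vk::DeviceCreateInfo, vk::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures> chain(
  //     vk::DeviceCreateInfo( {}, 1, &queueCreateInfo ),
  //     vk::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( VK_TRUE ) );
  //   vk::Device device = physicalDevice.createDevice( chain.get<vk::DeviceCreateInfo>() );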
struct PipelineCacheCreateInfo
|
|
{
|
|
using NativeType = VkPipelineCacheCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCacheCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ = {}, size_t initialDataSize_ = {}, const void * pInitialData_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), initialDataSize( initialDataSize_ ), pInitialData( pInitialData_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineCacheCreateInfo( *reinterpret_cast<PipelineCacheCreateInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
template <typename T>
|
|
PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ )
|
|
: flags( flags_ ), initialDataSize( initialData_.size() * sizeof(T) ), pInitialData( initialData_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineCacheCreateInfo & operator=( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineCacheCreateInfo & operator=( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
initialDataSize = initialDataSize_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pInitialData = pInitialData_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
template <typename T>
|
|
PipelineCacheCreateInfo & setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
initialDataSize = initialData_.size() * sizeof(T);
|
|
pInitialData = initialData_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineCacheCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineCacheCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineCacheCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineCacheCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags const &, size_t const &, const void * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, initialDataSize, pInitialData );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineCacheCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( initialDataSize == rhs.initialDataSize )
|
|
&& ( pInitialData == rhs.pInitialData );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCacheCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags = {};
|
|
size_t initialDataSize = {};
|
|
const void * pInitialData = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo>::value, "PipelineCacheCreateInfo is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineCacheCreateInfo>
  {
    using Type = PipelineCacheCreateInfo;
  };
|
|
|
|
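
// Illustrative usage sketch (not part of the generated header): creating a pipeline cache
// from previously serialized data. Assumes the default vulkan.hpp configuration (exceptions
// and smart handles enabled) plus an existing vk::Device `device` and a std::vector<uint8_t>
// `cacheData` loaded by the application; both names are assumptions for the example.
//
//   vk::PipelineCacheCreateInfo cacheCreateInfo;
//   cacheCreateInfo.setInitialDataSize( cacheData.size() )   // setters return *this and chain
//                  .setPInitialData( cacheData.data() );
//   vk::UniquePipelineCache cache = device.createPipelineCacheUnique( cacheCreateInfo );
//
// When enhanced mode is enabled, the templated setInitialData setter above can fill both
// initialDataSize and pInitialData from one array proxy instead, e.g.
// cacheCreateInfo.setInitialData<uint8_t>( cacheData ).
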
struct PipelineCacheHeaderVersionOne
|
|
{
|
|
using NativeType = VkPipelineCacheHeaderVersionOne;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne(uint32_t headerSize_ = {}, VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion_ = VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion::eOne, uint32_t vendorID_ = {}, uint32_t deviceID_ = {}, std::array<uint8_t,VK_UUID_SIZE> const & pipelineCacheUUID_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: headerSize( headerSize_ ), headerVersion( headerVersion_ ), vendorID( vendorID_ ), deviceID( deviceID_ ), pipelineCacheUUID( pipelineCacheUUID_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne( PipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineCacheHeaderVersionOne( VkPipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineCacheHeaderVersionOne( *reinterpret_cast<PipelineCacheHeaderVersionOne const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineCacheHeaderVersionOne & operator=( PipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineCacheHeaderVersionOne & operator=( VkPipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setHeaderSize( uint32_t headerSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
headerSize = headerSize_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setHeaderVersion( VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
headerVersion = headerVersion_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setVendorID( uint32_t vendorID_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
vendorID = vendorID_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setDeviceID( uint32_t deviceID_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceID = deviceID_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setPipelineCacheUUID( std::array<uint8_t,VK_UUID_SIZE> pipelineCacheUUID_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineCacheUUID = pipelineCacheUUID_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineCacheHeaderVersionOne const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineCacheHeaderVersionOne*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineCacheHeaderVersionOne &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineCacheHeaderVersionOne*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( headerSize, headerVersion, vendorID, deviceID, pipelineCacheUUID );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineCacheHeaderVersionOne const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineCacheHeaderVersionOne const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( headerSize == rhs.headerSize )
|
|
&& ( headerVersion == rhs.headerVersion )
|
|
&& ( vendorID == rhs.vendorID )
|
|
&& ( deviceID == rhs.deviceID )
|
|
&& ( pipelineCacheUUID == rhs.pipelineCacheUUID );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineCacheHeaderVersionOne const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t headerSize = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion = VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion::eOne;
|
|
uint32_t vendorID = {};
|
|
uint32_t deviceID = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> pipelineCacheUUID = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne ) == sizeof( VkPipelineCacheHeaderVersionOne ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne>::value, "PipelineCacheHeaderVersionOne is not nothrow_move_constructible!" );
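
// Illustrative usage sketch (not part of the generated header): PipelineCacheHeaderVersionOne
// mirrors the header at the start of the blob returned by vkGetPipelineCacheData, so it can be
// used to check whether previously saved cache data still matches the current device. The
// vk::PhysicalDevice `physicalDevice` and std::vector<uint8_t> `cacheData` below are assumptions.
//
//   vk::PipelineCacheHeaderVersionOne header;
//   if ( cacheData.size() >= sizeof( header ) )
//   {
//     std::memcpy( &header, cacheData.data(), sizeof( header ) );
//     vk::PhysicalDeviceProperties props = physicalDevice.getProperties();
//     bool usable = ( header.headerVersion == vk::PipelineCacheHeaderVersion::eOne )
//                && ( header.vendorID == props.vendorID )
//                && ( header.deviceID == props.deviceID )
//                && ( header.pipelineCacheUUID == props.pipelineCacheUUID );
//     // only hand cacheData to PipelineCacheCreateInfo when usable is true
//   }
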
struct PipelineColorBlendAdvancedStateCreateInfoEXT
|
|
{
|
|
using NativeType = VkPipelineColorBlendAdvancedStateCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {}, VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated) VULKAN_HPP_NOEXCEPT
|
|
: srcPremultiplied( srcPremultiplied_ ), dstPremultiplied( dstPremultiplied_ ), blendOverlap( blendOverlap_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineColorBlendAdvancedStateCreateInfoEXT( *reinterpret_cast<PipelineColorBlendAdvancedStateCreateInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & setSrcPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcPremultiplied = srcPremultiplied_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & setDstPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstPremultiplied = dstPremultiplied_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
blendOverlap = blendOverlap_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineColorBlendAdvancedStateCreateInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineColorBlendAdvancedStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineColorBlendAdvancedStateCreateInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::BlendOverlapEXT const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcPremultiplied, dstPremultiplied, blendOverlap );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineColorBlendAdvancedStateCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( srcPremultiplied == rhs.srcPremultiplied )
|
|
&& ( dstPremultiplied == rhs.dstPremultiplied )
|
|
&& ( blendOverlap == rhs.blendOverlap );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {};
|
|
VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT ) == sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT>::value, "PipelineColorBlendAdvancedStateCreateInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT>
|
|
{
|
|
using Type = PipelineColorBlendAdvancedStateCreateInfoEXT;
|
|
};
struct PipelineColorWriteCreateInfoEXT
|
|
{
|
|
using NativeType = VkPipelineColorWriteCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorWriteCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT(uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: attachmentCount( attachmentCount_ ), pColorWriteEnables( pColorWriteEnables_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineColorWriteCreateInfoEXT( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineColorWriteCreateInfoEXT( *reinterpret_cast<PipelineColorWriteCreateInfoEXT const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineColorWriteCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables_ )
|
|
: attachmentCount( static_cast<uint32_t>( colorWriteEnables_.size() ) ), pColorWriteEnables( colorWriteEnables_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineColorWriteCreateInfoEXT & operator=( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineColorWriteCreateInfoEXT & operator=( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentCount = attachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & setPColorWriteEnables( const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pColorWriteEnables = pColorWriteEnables_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineColorWriteCreateInfoEXT & setColorWriteEnables( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentCount = static_cast<uint32_t>( colorWriteEnables_.size() );
|
|
pColorWriteEnables = colorWriteEnables_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineColorWriteCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineColorWriteCreateInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineColorWriteCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineColorWriteCreateInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Bool32 * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, attachmentCount, pColorWriteEnables );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineColorWriteCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineColorWriteCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( attachmentCount == rhs.attachmentCount )
|
|
&& ( pColorWriteEnables == rhs.pColorWriteEnables );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineColorWriteCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorWriteCreateInfoEXT;
|
|
const void * pNext = {};
|
|
uint32_t attachmentCount = {};
|
|
const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT ) == sizeof( VkPipelineColorWriteCreateInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT>::value, "PipelineColorWriteCreateInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineColorWriteCreateInfoEXT>
|
|
{
|
|
using Type = PipelineColorWriteCreateInfoEXT;
|
|
};
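
// Illustrative usage sketch (not part of the generated header): chaining
// PipelineColorWriteCreateInfoEXT into a color-blend state when VK_EXT_color_write_enable is
// enabled. The vk::PipelineColorBlendStateCreateInfo `colorBlendState` is an assumption.
//
//   std::array<vk::Bool32, 2> writeEnables = { VK_TRUE, VK_FALSE };
//   vk::PipelineColorWriteCreateInfoEXT colorWriteInfo( writeEnables );
//     // the enhanced-mode constructor fills attachmentCount and pColorWriteEnables from the proxy
//   colorBlendState.pNext = &colorWriteInfo;
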
struct PipelineCompilerControlCreateInfoAMD
|
|
{
|
|
using NativeType = VkPipelineCompilerControlCreateInfoAMD;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCompilerControlCreateInfoAMD;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD(VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: compilerControlFlags( compilerControlFlags_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineCompilerControlCreateInfoAMD( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineCompilerControlCreateInfoAMD( *reinterpret_cast<PipelineCompilerControlCreateInfoAMD const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineCompilerControlCreateInfoAMD & operator=( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineCompilerControlCreateInfoAMD & operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCompilerControlCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCompilerControlCreateInfoAMD & setCompilerControlFlags( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
compilerControlFlags = compilerControlFlags_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineCompilerControlCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineCompilerControlCreateInfoAMD*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineCompilerControlCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineCompilerControlCreateInfoAMD*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, compilerControlFlags );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineCompilerControlCreateInfoAMD const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineCompilerControlCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( compilerControlFlags == rhs.compilerControlFlags );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineCompilerControlCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCompilerControlCreateInfoAMD;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD ) == sizeof( VkPipelineCompilerControlCreateInfoAMD ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD>::value, "PipelineCompilerControlCreateInfoAMD is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineCompilerControlCreateInfoAMD>
|
|
{
|
|
using Type = PipelineCompilerControlCreateInfoAMD;
|
|
};
struct PipelineCoverageModulationStateCreateInfoNV
|
|
{
|
|
using NativeType = VkPipelineCoverageModulationStateCreateInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageModulationStateCreateInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone, VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ = {}, uint32_t coverageModulationTableCount_ = {}, const float * pCoverageModulationTable_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), coverageModulationMode( coverageModulationMode_ ), coverageModulationTableEnable( coverageModulationTableEnable_ ), coverageModulationTableCount( coverageModulationTableCount_ ), pCoverageModulationTable( pCoverageModulationTable_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineCoverageModulationStateCreateInfoNV( *reinterpret_cast<PipelineCoverageModulationStateCreateInfoNV const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineCoverageModulationStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_, VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_, VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & coverageModulationTable_ )
|
|
: flags( flags_ ), coverageModulationMode( coverageModulationMode_ ), coverageModulationTableEnable( coverageModulationTableEnable_ ), coverageModulationTableCount( static_cast<uint32_t>( coverageModulationTable_.size() ) ), pCoverageModulationTable( coverageModulationTable_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineCoverageModulationStateCreateInfoNV & operator=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineCoverageModulationStateCreateInfoNV & operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationMode( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
coverageModulationMode = coverageModulationMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
coverageModulationTableEnable = coverageModulationTableEnable_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableCount( uint32_t coverageModulationTableCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
coverageModulationTableCount = coverageModulationTableCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setPCoverageModulationTable( const float * pCoverageModulationTable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pCoverageModulationTable = pCoverageModulationTable_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTable( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & coverageModulationTable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
coverageModulationTableCount = static_cast<uint32_t>( coverageModulationTable_.size() );
|
|
pCoverageModulationTable = coverageModulationTable_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineCoverageModulationStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineCoverageModulationStateCreateInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineCoverageModulationStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineCoverageModulationStateCreateInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV const &, VULKAN_HPP_NAMESPACE::CoverageModulationModeNV const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, const float * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, coverageModulationMode, coverageModulationTableEnable, coverageModulationTableCount, pCoverageModulationTable );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineCoverageModulationStateCreateInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineCoverageModulationStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( coverageModulationMode == rhs.coverageModulationMode )
|
|
&& ( coverageModulationTableEnable == rhs.coverageModulationTableEnable )
|
|
&& ( coverageModulationTableCount == rhs.coverageModulationTableCount )
|
|
&& ( pCoverageModulationTable == rhs.pCoverageModulationTable );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags = {};
|
|
VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone;
|
|
VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable = {};
|
|
uint32_t coverageModulationTableCount = {};
|
|
const float * pCoverageModulationTable = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV ) == sizeof( VkPipelineCoverageModulationStateCreateInfoNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV>::value, "PipelineCoverageModulationStateCreateInfoNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineCoverageModulationStateCreateInfoNV>
|
|
{
|
|
using Type = PipelineCoverageModulationStateCreateInfoNV;
|
|
};
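
// Illustrative usage sketch (not part of the generated header): the enhanced-mode constructor
// derives coverageModulationTableCount and pCoverageModulationTable from an array proxy. The
// vk::PipelineMultisampleStateCreateInfo `multisampleState` it is chained into is an assumption
// (the structure belongs to VK_NV_framebuffer_mixed_samples).
//
//   std::array<float, 4> modulationTable = { 0.25f, 0.5f, 0.75f, 1.0f };
//   vk::PipelineCoverageModulationStateCreateInfoNV coverageModulation(
//     {}, vk::CoverageModulationModeNV::eRgb, VK_TRUE, modulationTable );
//   multisampleState.pNext = &coverageModulation;
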
struct PipelineCoverageReductionStateCreateInfoNV
|
|
{
|
|
using NativeType = VkPipelineCoverageReductionStateCreateInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageReductionStateCreateInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), coverageReductionMode( coverageReductionMode_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineCoverageReductionStateCreateInfoNV( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineCoverageReductionStateCreateInfoNV( *reinterpret_cast<PipelineCoverageReductionStateCreateInfoNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineCoverageReductionStateCreateInfoNV & operator=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineCoverageReductionStateCreateInfoNV & operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & setCoverageReductionMode( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
coverageReductionMode = coverageReductionMode_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineCoverageReductionStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineCoverageReductionStateCreateInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineCoverageReductionStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineCoverageReductionStateCreateInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV const &, VULKAN_HPP_NAMESPACE::CoverageReductionModeNV const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, coverageReductionMode );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineCoverageReductionStateCreateInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineCoverageReductionStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( coverageReductionMode == rhs.coverageReductionMode );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageReductionStateCreateInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags = {};
|
|
VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV ) == sizeof( VkPipelineCoverageReductionStateCreateInfoNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV>::value, "PipelineCoverageReductionStateCreateInfoNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineCoverageReductionStateCreateInfoNV>
|
|
{
|
|
using Type = PipelineCoverageReductionStateCreateInfoNV;
|
|
};
struct PipelineCoverageToColorStateCreateInfoNV
|
|
{
|
|
using NativeType = VkPipelineCoverageToColorStateCreateInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageToColorStateCreateInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ = {}, uint32_t coverageToColorLocation_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), coverageToColorEnable( coverageToColorEnable_ ), coverageToColorLocation( coverageToColorLocation_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineCoverageToColorStateCreateInfoNV( *reinterpret_cast<PipelineCoverageToColorStateCreateInfoNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineCoverageToColorStateCreateInfoNV & operator=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineCoverageToColorStateCreateInfoNV & operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setCoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
coverageToColorEnable = coverageToColorEnable_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setCoverageToColorLocation( uint32_t coverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
coverageToColorLocation = coverageToColorLocation_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineCoverageToColorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineCoverageToColorStateCreateInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineCoverageToColorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineCoverageToColorStateCreateInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, coverageToColorEnable, coverageToColorLocation );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineCoverageToColorStateCreateInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineCoverageToColorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( coverageToColorEnable == rhs.coverageToColorEnable )
|
|
&& ( coverageToColorLocation == rhs.coverageToColorLocation );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable = {};
|
|
uint32_t coverageToColorLocation = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV ) == sizeof( VkPipelineCoverageToColorStateCreateInfoNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV>::value, "PipelineCoverageToColorStateCreateInfoNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineCoverageToColorStateCreateInfoNV>
|
|
{
|
|
using Type = PipelineCoverageToColorStateCreateInfoNV;
|
|
};
struct PipelineCreationFeedback
|
|
{
|
|
using NativeType = VkPipelineCreationFeedback;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineCreationFeedback(VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags flags_ = {}, uint64_t duration_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), duration( duration_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineCreationFeedback( PipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineCreationFeedback( VkPipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineCreationFeedback( *reinterpret_cast<PipelineCreationFeedback const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineCreationFeedback & operator=( PipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineCreationFeedback & operator=( VkPipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPipelineCreationFeedback const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineCreationFeedback*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineCreationFeedback &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineCreationFeedback*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags const &, uint64_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( flags, duration );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineCreationFeedback const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineCreationFeedback const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( flags == rhs.flags )
|
|
&& ( duration == rhs.duration );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineCreationFeedback const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags flags = {};
|
|
uint64_t duration = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback ) == sizeof( VkPipelineCreationFeedback ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback>::value, "PipelineCreationFeedback is not nothrow_move_constructible!" );
|
|
using PipelineCreationFeedbackEXT = PipelineCreationFeedback;
struct PipelineCreationFeedbackCreateInfo
|
|
{
|
|
using NativeType = VkPipelineCreationFeedbackCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCreationFeedbackCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_ = {}, uint32_t pipelineStageCreationFeedbackCount_ = {}, VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: pPipelineCreationFeedback( pPipelineCreationFeedback_ ), pipelineStageCreationFeedbackCount( pipelineStageCreationFeedbackCount_ ), pPipelineStageCreationFeedbacks( pPipelineStageCreationFeedbacks_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfo( PipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineCreationFeedbackCreateInfo( VkPipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineCreationFeedbackCreateInfo( *reinterpret_cast<PipelineCreationFeedbackCreateInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineCreationFeedbackCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback> const & pipelineStageCreationFeedbacks_ )
|
|
: pPipelineCreationFeedback( pPipelineCreationFeedback_ ), pipelineStageCreationFeedbackCount( static_cast<uint32_t>( pipelineStageCreationFeedbacks_.size() ) ), pPipelineStageCreationFeedbacks( pipelineStageCreationFeedbacks_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineCreationFeedbackCreateInfo & operator=( PipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineCreationFeedbackCreateInfo & operator=( VkPipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & setPPipelineCreationFeedback( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pPipelineCreationFeedback = pPipelineCreationFeedback_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & setPipelineStageCreationFeedbackCount( uint32_t pipelineStageCreationFeedbackCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineStageCreationFeedbackCount = pipelineStageCreationFeedbackCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & setPPipelineStageCreationFeedbacks( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pPipelineStageCreationFeedbacks = pPipelineStageCreationFeedbacks_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineCreationFeedbackCreateInfo & setPipelineStageCreationFeedbacks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback> const & pipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineStageCreationFeedbackCount = static_cast<uint32_t>( pipelineStageCreationFeedbacks_.size() );
|
|
pPipelineStageCreationFeedbacks = pipelineStageCreationFeedbacks_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineCreationFeedbackCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineCreationFeedbackCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineCreationFeedbackCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineCreationFeedbackCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pPipelineCreationFeedback, pipelineStageCreationFeedbackCount, pPipelineStageCreationFeedbacks );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineCreationFeedbackCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineCreationFeedbackCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( pPipelineCreationFeedback == rhs.pPipelineCreationFeedback )
|
|
&& ( pipelineStageCreationFeedbackCount == rhs.pipelineStageCreationFeedbackCount )
|
|
&& ( pPipelineStageCreationFeedbacks == rhs.pPipelineStageCreationFeedbacks );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineCreationFeedbackCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreationFeedbackCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback = {};
|
|
uint32_t pipelineStageCreationFeedbackCount = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo ) == sizeof( VkPipelineCreationFeedbackCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo>::value, "PipelineCreationFeedbackCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineCreationFeedbackCreateInfo>
|
|
{
|
|
using Type = PipelineCreationFeedbackCreateInfo;
|
|
};
|
|
using PipelineCreationFeedbackCreateInfoEXT = PipelineCreationFeedbackCreateInfo;
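
// Illustrative usage sketch (not part of the generated header): gathering creation feedback for
// a graphics pipeline. The vk::GraphicsPipelineCreateInfo `graphicsPipelineCreateInfo` with two
// shader stages is an assumption; the feedback structures must outlive pipeline creation.
//
//   vk::PipelineCreationFeedback pipelineFeedback;
//   std::array<vk::PipelineCreationFeedback, 2> stageFeedbacks;   // one entry per shader stage
//   vk::PipelineCreationFeedbackCreateInfo feedbackInfo( &pipelineFeedback, stageFeedbacks );
//   graphicsPipelineCreateInfo.pNext = &feedbackInfo;
//   // ... create the pipeline, then:
//   if ( pipelineFeedback.flags & vk::PipelineCreationFeedbackFlagBits::eValid )
//   {
//     // pipelineFeedback.duration holds the creation time in nanoseconds
//   }
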
struct PipelineDiscardRectangleStateCreateInfoEXT
|
|
{
|
|
using NativeType = VkPipelineDiscardRectangleStateCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive, uint32_t discardRectangleCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), discardRectangleMode( discardRectangleMode_ ), discardRectangleCount( discardRectangleCount_ ), pDiscardRectangles( pDiscardRectangles_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineDiscardRectangleStateCreateInfoEXT( *reinterpret_cast<PipelineDiscardRectangleStateCreateInfoEXT const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineDiscardRectangleStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_, VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles_ )
|
|
: flags( flags_ ), discardRectangleMode( discardRectangleMode_ ), discardRectangleCount( static_cast<uint32_t>( discardRectangles_.size() ) ), pDiscardRectangles( discardRectangles_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineDiscardRectangleStateCreateInfoEXT & operator=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineDiscardRectangleStateCreateInfoEXT & operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangleMode( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
discardRectangleMode = discardRectangleMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangleCount( uint32_t discardRectangleCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
discardRectangleCount = discardRectangleCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setPDiscardRectangles( const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDiscardRectangles = pDiscardRectangles_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
discardRectangleCount = static_cast<uint32_t>( discardRectangles_.size() );
|
|
pDiscardRectangles = discardRectangles_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineDiscardRectangleStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineDiscardRectangleStateCreateInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineDiscardRectangleStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineDiscardRectangleStateCreateInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT const &, VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Rect2D * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, discardRectangleMode, discardRectangleCount, pDiscardRectangles );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineDiscardRectangleStateCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( discardRectangleMode == rhs.discardRectangleMode )
|
|
&& ( discardRectangleCount == rhs.discardRectangleCount )
|
|
&& ( pDiscardRectangles == rhs.pDiscardRectangles );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags = {};
|
|
VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive;
|
|
uint32_t discardRectangleCount = {};
|
|
const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT ) == sizeof( VkPipelineDiscardRectangleStateCreateInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT>::value, "PipelineDiscardRectangleStateCreateInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineDiscardRectangleStateCreateInfoEXT>
|
|
{
|
|
using Type = PipelineDiscardRectangleStateCreateInfoEXT;
|
|
};
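
// Illustrative usage sketch (not part of the generated header): the enhanced-mode constructor
// fills discardRectangleCount and pDiscardRectangles from an array proxy. Chaining into a
// vk::GraphicsPipelineCreateInfo named `graphicsPipelineCreateInfo` is an assumption (the
// structure belongs to VK_EXT_discard_rectangles).
//
//   std::vector<vk::Rect2D> rectangles = { vk::Rect2D( { 0, 0 }, { 256, 256 } ) };
//   vk::PipelineDiscardRectangleStateCreateInfoEXT discardRectangleState(
//     {}, vk::DiscardRectangleModeEXT::eInclusive, rectangles );
//   graphicsPipelineCreateInfo.pNext = &discardRectangleState;
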
  struct PipelineExecutableInfoKHR
  {
    using NativeType = VkPipelineExecutableInfoKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, uint32_t executableIndex_ = {} ) VULKAN_HPP_NOEXCEPT
      : pipeline( pipeline_ ), executableIndex( executableIndex_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineExecutableInfoKHR( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineExecutableInfoKHR( *reinterpret_cast<PipelineExecutableInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineExecutableInfoKHR & operator=( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineExecutableInfoKHR & operator=( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
    {
      pipeline = pipeline_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & setExecutableIndex( uint32_t executableIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      executableIndex = executableIndex_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPipelineExecutableInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineExecutableInfoKHR*>( this );
    }

    explicit operator VkPipelineExecutableInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineExecutableInfoKHR*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Pipeline const &, uint32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pipeline, executableIndex );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PipelineExecutableInfoKHR const & ) const = default;
#else
    bool operator==( PipelineExecutableInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( pipeline == rhs.pipeline )
          && ( executableIndex == rhs.executableIndex );
#endif
    }

    bool operator!=( PipelineExecutableInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInfoKHR;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
    uint32_t executableIndex = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR ) == sizeof( VkPipelineExecutableInfoKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR>::value, "PipelineExecutableInfoKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineExecutableInfoKHR>
  {
    using Type = PipelineExecutableInfoKHR;
  };
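  // Illustrative usage sketch, not part of the generated API: PipelineExecutableInfoKHR selects
  // one executable of a pipeline for the VK_KHR_pipeline_executable_properties queries. The
  // Device::getPipelineExecutableStatisticsKHR wrapper is assumed to be declared elsewhere in
  // vulkan.hpp; `device` and `pipeline` are application-side objects.
  //
  //   VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR executableInfo( pipeline, 0 /* first executable */ );
  //   auto statistics = device.getPipelineExecutableStatisticsKHR( executableInfo );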
|
|
|
|
struct PipelineExecutableInternalRepresentationKHR
|
|
{
|
|
using NativeType = VkPipelineExecutableInternalRepresentationKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInternalRepresentationKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR(std::array<char,VK_MAX_DESCRIPTION_SIZE> const & name_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & description_ = {}, VULKAN_HPP_NAMESPACE::Bool32 isText_ = {}, size_t dataSize_ = {}, void * pData_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: name( name_ ), description( description_ ), isText( isText_ ), dataSize( dataSize_ ), pData( pData_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineExecutableInternalRepresentationKHR( *reinterpret_cast<PipelineExecutableInternalRepresentationKHR const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
template <typename T>
|
|
PipelineExecutableInternalRepresentationKHR( std::array<char,VK_MAX_DESCRIPTION_SIZE> const & name_, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & description_, VULKAN_HPP_NAMESPACE::Bool32 isText_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & data_ )
|
|
: name( name_ ), description( description_ ), isText( isText_ ), dataSize( data_.size() * sizeof(T) ), pData( data_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineExecutableInternalRepresentationKHR & operator=( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineExecutableInternalRepresentationKHR & operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPipelineExecutableInternalRepresentationKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineExecutableInternalRepresentationKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineExecutableInternalRepresentationKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, VULKAN_HPP_NAMESPACE::Bool32 const &, size_t const &, void * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, name, description, isText, dataSize, pData );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineExecutableInternalRepresentationKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( name == rhs.name )
|
|
&& ( description == rhs.description )
|
|
&& ( isText == rhs.isText )
|
|
&& ( dataSize == rhs.dataSize )
|
|
&& ( pData == rhs.pData );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInternalRepresentationKHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 isText = {};
|
|
size_t dataSize = {};
|
|
void * pData = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR ) == sizeof( VkPipelineExecutableInternalRepresentationKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>::value, "PipelineExecutableInternalRepresentationKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineExecutableInternalRepresentationKHR>
|
|
{
|
|
using Type = PipelineExecutableInternalRepresentationKHR;
|
|
};
|
|
|
|
struct PipelineExecutablePropertiesKHR
|
|
{
|
|
using NativeType = VkPipelineExecutablePropertiesKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutablePropertiesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR(VULKAN_HPP_NAMESPACE::ShaderStageFlags stages_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & name_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & description_ = {}, uint32_t subgroupSize_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: stages( stages_ ), name( name_ ), description( description_ ), subgroupSize( subgroupSize_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineExecutablePropertiesKHR( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineExecutablePropertiesKHR( *reinterpret_cast<PipelineExecutablePropertiesKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineExecutablePropertiesKHR & operator=( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineExecutablePropertiesKHR & operator=( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPipelineExecutablePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineExecutablePropertiesKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineExecutablePropertiesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineExecutablePropertiesKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, stages, name, description, subgroupSize );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineExecutablePropertiesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( stages == rhs.stages )
|
|
&& ( name == rhs.name )
|
|
&& ( description == rhs.description )
|
|
&& ( subgroupSize == rhs.subgroupSize );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutablePropertiesKHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlags stages = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
|
|
uint32_t subgroupSize = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR ) == sizeof( VkPipelineExecutablePropertiesKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>::value, "PipelineExecutablePropertiesKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineExecutablePropertiesKHR>
|
|
{
|
|
using Type = PipelineExecutablePropertiesKHR;
|
|
};
|
|
|
|
union PipelineExecutableStatisticValueKHR
|
|
{
|
|
using NativeType = VkPipelineExecutableStatisticValueKHR;
|
|
#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::Bool32 b32_ = {} )
|
|
: b32( b32_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( int64_t i64_ )
|
|
: i64( i64_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( uint64_t u64_ )
|
|
: u64( u64_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( double f64_ )
|
|
: f64( f64_ )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
|
|
|
|
#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setB32( VULKAN_HPP_NAMESPACE::Bool32 b32_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
b32 = b32_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setI64( int64_t i64_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
i64 = i64_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setU64( uint64_t u64_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
u64 = u64_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setF64( double f64_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
f64 = f64_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_UNION_SETTERS*/
|
|
|
|
operator VkPipelineExecutableStatisticValueKHR const &() const
|
|
{
|
|
return *reinterpret_cast<const VkPipelineExecutableStatisticValueKHR*>( this );
|
|
}
|
|
|
|
operator VkPipelineExecutableStatisticValueKHR &()
|
|
{
|
|
return *reinterpret_cast<VkPipelineExecutableStatisticValueKHR*>( this );
|
|
}
|
|
|
|
#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
|
|
VULKAN_HPP_NAMESPACE::Bool32 b32;
|
|
int64_t i64;
|
|
uint64_t u64;
|
|
double f64;
|
|
#else
|
|
VkBool32 b32;
|
|
int64_t i64;
|
|
uint64_t u64;
|
|
double f64;
|
|
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
|
|
|
|
};
|
|
|
|
struct PipelineExecutableStatisticKHR
|
|
{
|
|
using NativeType = VkPipelineExecutableStatisticKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableStatisticKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticKHR(std::array<char,VK_MAX_DESCRIPTION_SIZE> const & name_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const & description_ = {}, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: name( name_ ), description( description_ ), format( format_ ), value( value_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticKHR( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineExecutableStatisticKHR( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineExecutableStatisticKHR( *reinterpret_cast<PipelineExecutableStatisticKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineExecutableStatisticKHR & operator=( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineExecutableStatisticKHR & operator=( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPipelineExecutableStatisticKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineExecutableStatisticKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineExecutableStatisticKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineExecutableStatisticKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR const &, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, name, description, format, value );
|
|
}
|
|
#endif
|
|
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableStatisticKHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32;
|
|
VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR ) == sizeof( VkPipelineExecutableStatisticKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>::value, "PipelineExecutableStatisticKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineExecutableStatisticKHR>
|
|
{
|
|
using Type = PipelineExecutableStatisticKHR;
|
|
};
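  // Illustrative usage sketch, not part of the generated API: the active member of `value` is
  // selected by `format`, so a statistic returned by the executable-properties query is usually
  // read with a switch. `stat` is assumed to be a PipelineExecutableStatisticKHR obtained from
  // such a query, and `use` is a hypothetical application-side consumer.
  //
  //   switch ( stat.format )
  //   {
  //     case VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32:  use( stat.value.b32 ); break;
  //     case VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eInt64:   use( stat.value.i64 ); break;
  //     case VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eUint64:  use( stat.value.u64 ); break;
  //     case VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eFloat64: use( stat.value.f64 ); break;
  //   }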
|
|
|
|
struct PipelineFragmentShadingRateEnumStateCreateInfoNV
|
|
{
|
|
using NativeType = VkPipelineFragmentShadingRateEnumStateCreateInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV(VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ = VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize, VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ = VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel, std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR,2> const & combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } }) VULKAN_HPP_NOEXCEPT
|
|
: shadingRateType( shadingRateType_ ), shadingRate( shadingRate_ ), combinerOps( combinerOps_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineFragmentShadingRateEnumStateCreateInfoNV( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineFragmentShadingRateEnumStateCreateInfoNV( *reinterpret_cast<PipelineFragmentShadingRateEnumStateCreateInfoNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineFragmentShadingRateEnumStateCreateInfoNV & operator=( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineFragmentShadingRateEnumStateCreateInfoNV & operator=( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & setShadingRateType( VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shadingRateType = shadingRateType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & setShadingRate( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shadingRate = shadingRate_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & setCombinerOps( std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR,2> combinerOps_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
combinerOps = combinerOps_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineFragmentShadingRateEnumStateCreateInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineFragmentShadingRateEnumStateCreateInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV const &, VULKAN_HPP_NAMESPACE::FragmentShadingRateNV const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, shadingRateType, shadingRate, combinerOps );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineFragmentShadingRateEnumStateCreateInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( shadingRateType == rhs.shadingRateType )
|
|
&& ( shadingRate == rhs.shadingRate )
|
|
&& ( combinerOps == rhs.combinerOps );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType = VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize;
|
|
VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate = VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel;
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> combinerOps = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV ) == sizeof( VkPipelineFragmentShadingRateEnumStateCreateInfoNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV>::value, "PipelineFragmentShadingRateEnumStateCreateInfoNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV>
|
|
{
|
|
using Type = PipelineFragmentShadingRateEnumStateCreateInfoNV;
|
|
};
|
|
|
|
struct PipelineFragmentShadingRateStateCreateInfoKHR
|
|
{
|
|
using NativeType = VkPipelineFragmentShadingRateStateCreateInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR(VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {}, std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR,2> const & combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } }) VULKAN_HPP_NOEXCEPT
|
|
: fragmentSize( fragmentSize_ ), combinerOps( combinerOps_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineFragmentShadingRateStateCreateInfoKHR( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineFragmentShadingRateStateCreateInfoKHR( *reinterpret_cast<PipelineFragmentShadingRateStateCreateInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineFragmentShadingRateStateCreateInfoKHR & operator=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineFragmentShadingRateStateCreateInfoKHR & operator=( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & setFragmentSize( VULKAN_HPP_NAMESPACE::Extent2D const & fragmentSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fragmentSize = fragmentSize_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & setCombinerOps( std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR,2> combinerOps_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
combinerOps = combinerOps_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineFragmentShadingRateStateCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineFragmentShadingRateStateCreateInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineFragmentShadingRateStateCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineFragmentShadingRateStateCreateInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, fragmentSize, combinerOps );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineFragmentShadingRateStateCreateInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( fragmentSize == rhs.fragmentSize )
|
|
&& ( combinerOps == rhs.combinerOps );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> combinerOps = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR ) == sizeof( VkPipelineFragmentShadingRateStateCreateInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR>::value, "PipelineFragmentShadingRateStateCreateInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR>
|
|
{
|
|
using Type = PipelineFragmentShadingRateStateCreateInfoKHR;
|
|
};
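  // Illustrative usage sketch, not part of the generated API: a 2x2 pipeline fragment shading
  // rate with both combiner ops left at their eKeep defaults, chained into a graphics pipeline
  // create info. `graphicsPipelineCreateInfo` is an application-side object and its pNext
  // chaining is an assumption of this example.
  //
  //   VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR shadingRateState;
  //   shadingRateState.fragmentSize    = VULKAN_HPP_NAMESPACE::Extent2D( 2, 2 );
  //   graphicsPipelineCreateInfo.pNext = &shadingRateState;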
|
|
|
|
  struct PipelineInfoKHR
  {
    using NativeType = VkPipelineInfoKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {} ) VULKAN_HPP_NOEXCEPT
      : pipeline( pipeline_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineInfoKHR( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineInfoKHR( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineInfoKHR( *reinterpret_cast<PipelineInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineInfoKHR & operator=( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineInfoKHR & operator=( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
    {
      pipeline = pipeline_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPipelineInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineInfoKHR*>( this );
    }

    explicit operator VkPipelineInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineInfoKHR*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Pipeline const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pipeline );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PipelineInfoKHR const & ) const = default;
#else
    bool operator==( PipelineInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( pipeline == rhs.pipeline );
#endif
    }

    bool operator!=( PipelineInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInfoKHR;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineInfoKHR ) == sizeof( VkPipelineInfoKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineInfoKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineInfoKHR>::value, "PipelineInfoKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineInfoKHR>
  {
    using Type = PipelineInfoKHR;
  };
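  // Illustrative usage sketch, not part of the generated API: PipelineInfoKHR names the pipeline
  // whose executables are enumerated with VK_KHR_pipeline_executable_properties. The
  // Device::getPipelineExecutablePropertiesKHR wrapper is assumed to be declared elsewhere in
  // vulkan.hpp; `device` and `pipeline` are application-side objects.
  //
  //   auto executables = device.getPipelineExecutablePropertiesKHR( VULKAN_HPP_NAMESPACE::PipelineInfoKHR( pipeline ) );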
|
|
|
|
  struct PushConstantRange
  {
    using NativeType = VkPushConstantRange;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PushConstantRange( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, uint32_t offset_ = {}, uint32_t size_ = {} ) VULKAN_HPP_NOEXCEPT
      : stageFlags( stageFlags_ ), offset( offset_ ), size( size_ )
    {}

    VULKAN_HPP_CONSTEXPR PushConstantRange( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
      : PushConstantRange( *reinterpret_cast<PushConstantRange const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PushConstantRange & operator=( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PushConstantRange & operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PushConstantRange const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      stageFlags = stageFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPushConstantRange const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPushConstantRange*>( this );
    }

    explicit operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPushConstantRange*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, uint32_t const &, uint32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( stageFlags, offset, size );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PushConstantRange const & ) const = default;
#else
    bool operator==( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( stageFlags == rhs.stageFlags )
          && ( offset == rhs.offset )
          && ( size == rhs.size );
#endif
    }

    bool operator!=( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
    uint32_t offset = {};
    uint32_t size = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PushConstantRange ) == sizeof( VkPushConstantRange ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PushConstantRange>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PushConstantRange>::value, "PushConstantRange is not nothrow_move_constructible!" );
|
|
|
|
struct PipelineLayoutCreateInfo
|
|
{
|
|
using NativeType = VkPipelineLayoutCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLayoutCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo(VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ = {}, uint32_t setLayoutCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {}, uint32_t pushConstantRangeCount_ = {}, const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), setLayoutCount( setLayoutCount_ ), pSetLayouts( pSetLayouts_ ), pushConstantRangeCount( pushConstantRangeCount_ ), pPushConstantRanges( pPushConstantRanges_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineLayoutCreateInfo( *reinterpret_cast<PipelineLayoutCreateInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineLayoutCreateInfo( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ = {} )
|
|
: flags( flags_ ), setLayoutCount( static_cast<uint32_t>( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() ), pushConstantRangeCount( static_cast<uint32_t>( pushConstantRanges_.size() ) ), pPushConstantRanges( pushConstantRanges_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineLayoutCreateInfo & operator=( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineLayoutCreateInfo & operator=( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
setLayoutCount = setLayoutCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSetLayouts = pSetLayouts_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineLayoutCreateInfo & setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
setLayoutCount = static_cast<uint32_t>( setLayouts_.size() );
|
|
pSetLayouts = setLayouts_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pushConstantRangeCount = pushConstantRangeCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pPushConstantRanges = pPushConstantRanges_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineLayoutCreateInfo & setPushConstantRanges( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pushConstantRangeCount = static_cast<uint32_t>( pushConstantRanges_.size() );
|
|
pPushConstantRanges = pushConstantRanges_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineLayoutCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineLayoutCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PushConstantRange * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, setLayoutCount, pSetLayouts, pushConstantRangeCount, pPushConstantRanges );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineLayoutCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( setLayoutCount == rhs.setLayoutCount )
|
|
&& ( pSetLayouts == rhs.pSetLayouts )
|
|
&& ( pushConstantRangeCount == rhs.pushConstantRangeCount )
|
|
&& ( pPushConstantRanges == rhs.pPushConstantRanges );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLayoutCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags = {};
|
|
uint32_t setLayoutCount = {};
|
|
const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {};
|
|
uint32_t pushConstantRangeCount = {};
|
|
const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo>::value, "PipelineLayoutCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineLayoutCreateInfo>
|
|
{
|
|
using Type = PipelineLayoutCreateInfo;
|
|
};
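  // Illustrative usage sketch, not part of the generated API: in enhanced mode the ArrayProxy
  // constructor above fills setLayoutCount/pSetLayouts and pushConstantRangeCount/pPushConstantRanges
  // from the proxies. The Device::createPipelineLayout wrapper is assumed to be declared elsewhere
  // in vulkan.hpp; `device` and `descriptorSetLayout` are application-side objects.
  //
  //   VULKAN_HPP_NAMESPACE::PushConstantRange pushRange( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, 0, 64 );
  //   VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo layoutInfo( {}, descriptorSetLayout, pushRange );
  //   VULKAN_HPP_NAMESPACE::PipelineLayout layout = device.createPipelineLayout( layoutInfo );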
|
|
|
|
  struct PipelineLibraryCreateInfoKHR
  {
    using NativeType = VkPipelineLibraryCreateInfoKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLibraryCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( uint32_t libraryCount_ = {}, const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ = {} ) VULKAN_HPP_NOEXCEPT
      : libraryCount( libraryCount_ ), pLibraries( pLibraries_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineLibraryCreateInfoKHR( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineLibraryCreateInfoKHR( *reinterpret_cast<PipelineLibraryCreateInfoKHR const *>( &rhs ) )
    {}

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineLibraryCreateInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & libraries_ )
      : libraryCount( static_cast<uint32_t>( libraries_.size() ) ), pLibraries( libraries_.data() )
    {}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineLibraryCreateInfoKHR & operator=( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineLibraryCreateInfoKHR & operator=( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setLibraryCount( uint32_t libraryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      libraryCount = libraryCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setPLibraries( const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ ) VULKAN_HPP_NOEXCEPT
    {
      pLibraries = pLibraries_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineLibraryCreateInfoKHR & setLibraries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & libraries_ ) VULKAN_HPP_NOEXCEPT
    {
      libraryCount = static_cast<uint32_t>( libraries_.size() );
      pLibraries = libraries_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPipelineLibraryCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineLibraryCreateInfoKHR*>( this );
    }

    explicit operator VkPipelineLibraryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineLibraryCreateInfoKHR*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Pipeline * const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, libraryCount, pLibraries );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PipelineLibraryCreateInfoKHR const & ) const = default;
#else
    bool operator==( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( libraryCount == rhs.libraryCount )
          && ( pLibraries == rhs.pLibraries );
#endif
    }

    bool operator!=( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLibraryCreateInfoKHR;
    const void * pNext = {};
    uint32_t libraryCount = {};
    const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR ) == sizeof( VkPipelineLibraryCreateInfoKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR>::value, "PipelineLibraryCreateInfoKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineLibraryCreateInfoKHR>
  {
    using Type = PipelineLibraryCreateInfoKHR;
  };
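  // Illustrative usage sketch, not part of the generated API: with VK_KHR_pipeline_library the
  // library list is typically built with the enhanced-mode setLibraries helper and then referenced
  // from the create info of the pipeline that links the libraries. `libraryPipelines` (a
  // std::vector<VULKAN_HPP_NAMESPACE::Pipeline>) and `rayTracingPipelineCreateInfo` (including its
  // pLibraryInfo member) are application-side assumptions of this example.
  //
  //   VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR libraryInfo;
  //   libraryInfo.setLibraries( libraryPipelines );
  //   rayTracingPipelineCreateInfo.pLibraryInfo = &libraryInfo;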
|
|
|
|
struct PipelineRasterizationConservativeStateCreateInfoEXT
|
|
{
|
|
using NativeType = VkPipelineRasterizationConservativeStateCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled, float extraPrimitiveOverestimationSize_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), conservativeRasterizationMode( conservativeRasterizationMode_ ), extraPrimitiveOverestimationSize( extraPrimitiveOverestimationSize_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineRasterizationConservativeStateCreateInfoEXT( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineRasterizationConservativeStateCreateInfoEXT( *reinterpret_cast<PipelineRasterizationConservativeStateCreateInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineRasterizationConservativeStateCreateInfoEXT & operator=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineRasterizationConservativeStateCreateInfoEXT & operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & setConservativeRasterizationMode( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
conservativeRasterizationMode = conservativeRasterizationMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & setExtraPrimitiveOverestimationSize( float extraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
extraPrimitiveOverestimationSize = extraPrimitiveOverestimationSize_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineRasterizationConservativeStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineRasterizationConservativeStateCreateInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineRasterizationConservativeStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineRasterizationConservativeStateCreateInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT const &, VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT const &, float const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, conservativeRasterizationMode, extraPrimitiveOverestimationSize );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineRasterizationConservativeStateCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( conservativeRasterizationMode == rhs.conservativeRasterizationMode )
|
|
&& ( extraPrimitiveOverestimationSize == rhs.extraPrimitiveOverestimationSize );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags = {};
|
|
VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled;
|
|
float extraPrimitiveOverestimationSize = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationConservativeStateCreateInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT>::value, "PipelineRasterizationConservativeStateCreateInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT>
|
|
{
|
|
using Type = PipelineRasterizationConservativeStateCreateInfoEXT;
|
|
};
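  // Illustrative usage sketch, not part of the generated API: conservative rasterization state is
  // chained into the pNext of the rasterization state of a graphics pipeline. `rasterizationState`
  // is assumed to be an application-side PipelineRasterizationStateCreateInfo.
  //
  //   VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT conservativeState(
  //     {}, VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eOverestimate, 0.0f );
  //   rasterizationState.pNext = &conservativeState;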
|
|
|
|
struct PipelineRasterizationDepthClipStateCreateInfoEXT
|
|
{
|
|
using NativeType = VkPipelineRasterizationDepthClipStateCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), depthClipEnable( depthClipEnable_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineRasterizationDepthClipStateCreateInfoEXT( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineRasterizationDepthClipStateCreateInfoEXT( *reinterpret_cast<PipelineRasterizationDepthClipStateCreateInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
depthClipEnable = depthClipEnable_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineRasterizationDepthClipStateCreateInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineRasterizationDepthClipStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineRasterizationDepthClipStateCreateInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, depthClipEnable );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineRasterizationDepthClipStateCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( depthClipEnable == rhs.depthClipEnable );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationDepthClipStateCreateInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT>::value, "PipelineRasterizationDepthClipStateCreateInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT>
|
|
{
|
|
using Type = PipelineRasterizationDepthClipStateCreateInfoEXT;
|
|
};
|
|
|
|
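  // Usage sketch (illustrative; not generated from the registry): controlling depth clipping
  // independently of depth clamping.  Assumes the VK_EXT_depth_clip_enable extension is enabled,
  // the default `vk` namespace, and that VULKAN_HPP_NO_STRUCT_SETTERS is not defined.
  //
  //   vk::PipelineRasterizationDepthClipStateCreateInfoEXT depthClipState;
  //   depthClipState.setDepthClipEnable( VK_TRUE );
  //
  //   vk::PipelineRasterizationStateCreateInfo rasterizationState;   // hypothetical local, filled in elsewhere
  //   rasterizationState.setPNext( &depthClipState );                // chained via pNext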
struct PipelineRasterizationLineStateCreateInfoEXT
|
|
{
|
|
using NativeType = VkPipelineRasterizationLineStateCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault, VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = {}, uint32_t lineStippleFactor_ = {}, uint16_t lineStipplePattern_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: lineRasterizationMode( lineRasterizationMode_ ), stippledLineEnable( stippledLineEnable_ ), lineStippleFactor( lineStippleFactor_ ), lineStipplePattern( lineStipplePattern_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineRasterizationLineStateCreateInfoEXT( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineRasterizationLineStateCreateInfoEXT( *reinterpret_cast<PipelineRasterizationLineStateCreateInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineRasterizationLineStateCreateInfoEXT & operator=( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineRasterizationLineStateCreateInfoEXT & operator=( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & setLineRasterizationMode( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
lineRasterizationMode = lineRasterizationMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & setStippledLineEnable( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stippledLineEnable = stippledLineEnable_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & setLineStippleFactor( uint32_t lineStippleFactor_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
lineStippleFactor = lineStippleFactor_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & setLineStipplePattern( uint16_t lineStipplePattern_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
lineStipplePattern = lineStipplePattern_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineRasterizationLineStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineRasterizationLineStateCreateInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineRasterizationLineStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineRasterizationLineStateCreateInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, uint16_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, lineRasterizationMode, stippledLineEnable, lineStippleFactor, lineStipplePattern );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineRasterizationLineStateCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( lineRasterizationMode == rhs.lineRasterizationMode )
|
|
&& ( stippledLineEnable == rhs.stippledLineEnable )
|
|
&& ( lineStippleFactor == rhs.lineStippleFactor )
|
|
&& ( lineStipplePattern == rhs.lineStipplePattern );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault;
|
|
VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable = {};
|
|
uint32_t lineStippleFactor = {};
|
|
uint16_t lineStipplePattern = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationLineStateCreateInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT>::value, "PipelineRasterizationLineStateCreateInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineRasterizationLineStateCreateInfoEXT>
|
|
{
|
|
using Type = PipelineRasterizationLineStateCreateInfoEXT;
|
|
};
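  // Usage sketch (illustrative): stippled Bresenham line rasterization via VK_EXT_line_rasterization.
  // As in the sketch above, `rasterizationState` stands for the application's
  // vk::PipelineRasterizationStateCreateInfo; assumes the stippledBresenhamLines feature is enabled.
  //
  //   vk::PipelineRasterizationLineStateCreateInfoEXT lineState;
  //   lineState.setLineRasterizationMode( vk::LineRasterizationModeEXT::eBresenham )
  //            .setStippledLineEnable( VK_TRUE )
  //            .setLineStippleFactor( 2 )
  //            .setLineStipplePattern( 0x5555 );   // 16-bit pattern: every other pixel
  //   rasterizationState.setPNext( &lineState );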
struct PipelineRasterizationProvokingVertexStateCreateInfoEXT
|
|
{
|
|
using NativeType = VkPipelineRasterizationProvokingVertexStateCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ = VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex) VULKAN_HPP_NOEXCEPT
|
|
: provokingVertexMode( provokingVertexMode_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineRasterizationProvokingVertexStateCreateInfoEXT( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineRasterizationProvokingVertexStateCreateInfoEXT( *reinterpret_cast<PipelineRasterizationProvokingVertexStateCreateInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineRasterizationProvokingVertexStateCreateInfoEXT & operator=( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineRasterizationProvokingVertexStateCreateInfoEXT & operator=( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationProvokingVertexStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationProvokingVertexStateCreateInfoEXT & setProvokingVertexMode( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
provokingVertexMode = provokingVertexMode_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineRasterizationProvokingVertexStateCreateInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineRasterizationProvokingVertexStateCreateInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, provokingVertexMode );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( provokingVertexMode == rhs.provokingVertexMode );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode = VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT>::value, "PipelineRasterizationProvokingVertexStateCreateInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT>
|
|
{
|
|
using Type = PipelineRasterizationProvokingVertexStateCreateInfoEXT;
|
|
};
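  // Usage sketch (illustrative): making the last vertex of each primitive the provoking vertex
  // (VK_EXT_provoking_vertex), again chained through the rasterization state's pNext.
  //
  //   vk::PipelineRasterizationProvokingVertexStateCreateInfoEXT provokingVertexState;
  //   provokingVertexState.setProvokingVertexMode( vk::ProvokingVertexModeEXT::eLastVertex );
  //   rasterizationState.setPNext( &provokingVertexState );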
struct PipelineRasterizationStateRasterizationOrderAMD
|
|
{
|
|
using NativeType = VkPipelineRasterizationStateRasterizationOrderAMD;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD(VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict) VULKAN_HPP_NOEXCEPT
|
|
: rasterizationOrder( rasterizationOrder_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineRasterizationStateRasterizationOrderAMD( *reinterpret_cast<PipelineRasterizationStateRasterizationOrderAMD const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineRasterizationStateRasterizationOrderAMD & operator=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineRasterizationStateRasterizationOrderAMD & operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateRasterizationOrderAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateRasterizationOrderAMD & setRasterizationOrder( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
rasterizationOrder = rasterizationOrder_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineRasterizationStateRasterizationOrderAMD const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineRasterizationStateRasterizationOrderAMD &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineRasterizationStateRasterizationOrderAMD*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RasterizationOrderAMD const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, rasterizationOrder );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineRasterizationStateRasterizationOrderAMD const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( rasterizationOrder == rhs.rasterizationOrder );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD ) == sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD>::value, "PipelineRasterizationStateRasterizationOrderAMD is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineRasterizationStateRasterizationOrderAMD>
|
|
{
|
|
using Type = PipelineRasterizationStateRasterizationOrderAMD;
|
|
};
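  // Usage sketch (illustrative): opting into relaxed rasterization order where
  // VK_AMD_rasterization_order is available; the default remains eStrict.
  //
  //   vk::PipelineRasterizationStateRasterizationOrderAMD rasterizationOrderInfo;
  //   rasterizationOrderInfo.setRasterizationOrder( vk::RasterizationOrderAMD::eRelaxed );
  //   rasterizationState.setPNext( &rasterizationOrderInfo );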
struct PipelineRasterizationStateStreamCreateInfoEXT
|
|
{
|
|
using NativeType = VkPipelineRasterizationStateStreamCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = {}, uint32_t rasterizationStream_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), rasterizationStream( rasterizationStream_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineRasterizationStateStreamCreateInfoEXT( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineRasterizationStateStreamCreateInfoEXT( *reinterpret_cast<PipelineRasterizationStateStreamCreateInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineRasterizationStateStreamCreateInfoEXT & operator=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineRasterizationStateStreamCreateInfoEXT & operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & setRasterizationStream( uint32_t rasterizationStream_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
rasterizationStream = rasterizationStream_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineRasterizationStateStreamCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineRasterizationStateStreamCreateInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineRasterizationStateStreamCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineRasterizationStateStreamCreateInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, rasterizationStream );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineRasterizationStateStreamCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( rasterizationStream == rhs.rasterizationStream );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags = {};
|
|
uint32_t rasterizationStream = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT ) == sizeof( VkPipelineRasterizationStateStreamCreateInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT>::value, "PipelineRasterizationStateStreamCreateInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineRasterizationStateStreamCreateInfoEXT>
|
|
{
|
|
using Type = PipelineRasterizationStateStreamCreateInfoEXT;
|
|
};
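  // Usage sketch (illustrative): selecting transform feedback vertex stream 1 for rasterization
  // (VK_EXT_transform_feedback).  The stream index must stay below the implementation's
  // maxTransformFeedbackStreams limit.
  //
  //   vk::PipelineRasterizationStateStreamCreateInfoEXT streamInfo;
  //   streamInfo.setRasterizationStream( 1 );
  //   rasterizationState.setPNext( &streamInfo );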
struct PipelineRenderingCreateInfo
|
|
{
|
|
using NativeType = VkPipelineRenderingCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRenderingCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineRenderingCreateInfo(uint32_t viewMask_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {}, VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined) VULKAN_HPP_NOEXCEPT
|
|
: viewMask( viewMask_ ), colorAttachmentCount( colorAttachmentCount_ ), pColorAttachmentFormats( pColorAttachmentFormats_ ), depthAttachmentFormat( depthAttachmentFormat_ ), stencilAttachmentFormat( stencilAttachmentFormat_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineRenderingCreateInfo( PipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineRenderingCreateInfo( VkPipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineRenderingCreateInfo( *reinterpret_cast<PipelineRenderingCreateInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineRenderingCreateInfo( uint32_t viewMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & colorAttachmentFormats_, VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined )
|
|
: viewMask( viewMask_ ), colorAttachmentCount( static_cast<uint32_t>( colorAttachmentFormats_.size() ) ), pColorAttachmentFormats( colorAttachmentFormats_.data() ), depthAttachmentFormat( depthAttachmentFormat_ ), stencilAttachmentFormat( stencilAttachmentFormat_ )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineRenderingCreateInfo & operator=( PipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineRenderingCreateInfo & operator=( VkPipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewMask = viewMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorAttachmentCount = colorAttachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pColorAttachmentFormats = pColorAttachmentFormats_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineRenderingCreateInfo & setColorAttachmentFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorAttachmentCount = static_cast<uint32_t>( colorAttachmentFormats_.size() );
|
|
pColorAttachmentFormats = colorAttachmentFormats_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
depthAttachmentFormat = depthAttachmentFormat_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stencilAttachmentFormat = stencilAttachmentFormat_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineRenderingCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineRenderingCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineRenderingCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineRenderingCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Format * const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::Format const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineRenderingCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineRenderingCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( viewMask == rhs.viewMask )
|
|
&& ( colorAttachmentCount == rhs.colorAttachmentCount )
|
|
&& ( pColorAttachmentFormats == rhs.pColorAttachmentFormats )
|
|
&& ( depthAttachmentFormat == rhs.depthAttachmentFormat )
|
|
&& ( stencilAttachmentFormat == rhs.stencilAttachmentFormat );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineRenderingCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRenderingCreateInfo;
|
|
const void * pNext = {};
|
|
uint32_t viewMask = {};
|
|
uint32_t colorAttachmentCount = {};
|
|
const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats = {};
|
|
VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo ) == sizeof( VkPipelineRenderingCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo>::value, "PipelineRenderingCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineRenderingCreateInfo>
|
|
{
|
|
using Type = PipelineRenderingCreateInfo;
|
|
};
|
|
using PipelineRenderingCreateInfoKHR = PipelineRenderingCreateInfo;
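  // Usage sketch (illustrative): declaring attachment formats for dynamic rendering
  // (VK_KHR_dynamic_rendering / Vulkan 1.3) when the pipeline is created without a render pass.
  // The array setter requires enhanced mode (VULKAN_HPP_DISABLE_ENHANCED_MODE not defined); the
  // formats below are placeholders for whatever the swapchain and depth buffer actually use.
  //
  //   std::array<vk::Format, 1> colorFormats = { vk::Format::eB8G8R8A8Unorm };
  //
  //   vk::PipelineRenderingCreateInfo renderingInfo;
  //   renderingInfo.setColorAttachmentFormats( colorFormats )
  //                .setDepthAttachmentFormat( vk::Format::eD32Sfloat );
  //
  //   vk::GraphicsPipelineCreateInfo pipelineInfo;   // hypothetical local, filled in elsewhere
  //   pipelineInfo.setPNext( &renderingInfo );       // pipelineInfo.renderPass stays VK_NULL_HANDLE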
struct PipelineRepresentativeFragmentTestStateCreateInfoNV
|
|
{
|
|
using NativeType = VkPipelineRepresentativeFragmentTestStateCreateInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: representativeFragmentTestEnable( representativeFragmentTestEnable_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineRepresentativeFragmentTestStateCreateInfoNV( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineRepresentativeFragmentTestStateCreateInfoNV( *reinterpret_cast<PipelineRepresentativeFragmentTestStateCreateInfoNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV & setRepresentativeFragmentTestEnable( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
representativeFragmentTestEnable = representativeFragmentTestEnable_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineRepresentativeFragmentTestStateCreateInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineRepresentativeFragmentTestStateCreateInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, representativeFragmentTestEnable );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineRepresentativeFragmentTestStateCreateInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( representativeFragmentTestEnable == rhs.representativeFragmentTestEnable );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV ) == sizeof( VkPipelineRepresentativeFragmentTestStateCreateInfoNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV>::value, "PipelineRepresentativeFragmentTestStateCreateInfoNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV>
|
|
{
|
|
using Type = PipelineRepresentativeFragmentTestStateCreateInfoNV;
|
|
};
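  // Usage sketch (illustrative): enabling the representative fragment test
  // (VK_NV_representative_fragment_test) on a graphics pipeline; `pipelineInfo` stands for the
  // application's vk::GraphicsPipelineCreateInfo.
  //
  //   vk::PipelineRepresentativeFragmentTestStateCreateInfoNV representativeTest;
  //   representativeTest.setRepresentativeFragmentTestEnable( VK_TRUE );
  //   pipelineInfo.setPNext( &representativeTest );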
struct PipelineSampleLocationsStateCreateInfoEXT
|
|
{
|
|
using NativeType = VkPipelineSampleLocationsStateCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: sampleLocationsEnable( sampleLocationsEnable_ ), sampleLocationsInfo( sampleLocationsInfo_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineSampleLocationsStateCreateInfoEXT( *reinterpret_cast<PipelineSampleLocationsStateCreateInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineSampleLocationsStateCreateInfoEXT & operator=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineSampleLocationsStateCreateInfoEXT & operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sampleLocationsEnable = sampleLocationsEnable_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sampleLocationsInfo = sampleLocationsInfo_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineSampleLocationsStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineSampleLocationsStateCreateInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineSampleLocationsStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineSampleLocationsStateCreateInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, sampleLocationsEnable, sampleLocationsInfo );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineSampleLocationsStateCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( sampleLocationsEnable == rhs.sampleLocationsEnable )
|
|
&& ( sampleLocationsInfo == rhs.sampleLocationsInfo );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable = {};
|
|
VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT ) == sizeof( VkPipelineSampleLocationsStateCreateInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT>::value, "PipelineSampleLocationsStateCreateInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineSampleLocationsStateCreateInfoEXT>
|
|
{
|
|
using Type = PipelineSampleLocationsStateCreateInfoEXT;
|
|
};
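  // Usage sketch (illustrative): supplying custom sample locations at pipeline creation time
  // (VK_EXT_sample_locations).  `locationsInfo` is assumed to be a vk::SampleLocationsInfoEXT
  // populated elsewhere with values the device reports as valid, and `multisampleState` the
  // application's vk::PipelineMultisampleStateCreateInfo.
  //
  //   vk::PipelineSampleLocationsStateCreateInfoEXT sampleLocationsState;
  //   sampleLocationsState.setSampleLocationsEnable( VK_TRUE )
  //                       .setSampleLocationsInfo( locationsInfo );
  //   multisampleState.setPNext( &sampleLocationsState );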
struct PipelineShaderStageRequiredSubgroupSizeCreateInfo
|
|
{
|
|
using NativeType = VkPipelineShaderStageRequiredSubgroupSizeCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfo(uint32_t requiredSubgroupSize_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: requiredSubgroupSize( requiredSubgroupSize_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfo( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineShaderStageRequiredSubgroupSizeCreateInfo( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineShaderStageRequiredSubgroupSizeCreateInfo( *reinterpret_cast<PipelineShaderStageRequiredSubgroupSizeCreateInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineShaderStageRequiredSubgroupSizeCreateInfo & operator=( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineShaderStageRequiredSubgroupSizeCreateInfo & operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineShaderStageRequiredSubgroupSizeCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineShaderStageRequiredSubgroupSizeCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, requiredSubgroupSize );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( requiredSubgroupSize == rhs.requiredSubgroupSize );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo;
|
|
void * pNext = {};
|
|
uint32_t requiredSubgroupSize = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo ) == sizeof( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo>::value, "PipelineShaderStageRequiredSubgroupSizeCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo>
|
|
{
|
|
using Type = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
|
|
};
|
|
using PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
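  // Usage sketch (illustrative): pinning a shader stage to a fixed subgroup size
  // (VK_EXT_subgroup_size_control / Vulkan 1.3).  No setters are generated for this struct, so
  // the member is assigned directly; the value must lie between the device's minSubgroupSize and
  // maxSubgroupSize limits.
  //
  //   vk::PipelineShaderStageRequiredSubgroupSizeCreateInfo requiredSubgroupSizeInfo;
  //   requiredSubgroupSizeInfo.requiredSubgroupSize = 32;
  //
  //   vk::PipelineShaderStageCreateInfo stageInfo;   // hypothetical local, filled in elsewhere
  //   stageInfo.setPNext( &requiredSubgroupSizeInfo );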
struct PipelineTessellationDomainOriginStateCreateInfo
|
|
{
|
|
using NativeType = VkPipelineTessellationDomainOriginStateCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo(VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft) VULKAN_HPP_NOEXCEPT
|
|
: domainOrigin( domainOrigin_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineTessellationDomainOriginStateCreateInfo( *reinterpret_cast<PipelineTessellationDomainOriginStateCreateInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineTessellationDomainOriginStateCreateInfo & operator=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineTessellationDomainOriginStateCreateInfo & operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo & setDomainOrigin( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
domainOrigin = domainOrigin_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineTessellationDomainOriginStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineTessellationDomainOriginStateCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineTessellationDomainOriginStateCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineTessellationDomainOriginStateCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::TessellationDomainOrigin const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, domainOrigin );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineTessellationDomainOriginStateCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( domainOrigin == rhs.domainOrigin );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo ) == sizeof( VkPipelineTessellationDomainOriginStateCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo>::value, "PipelineTessellationDomainOriginStateCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineTessellationDomainOriginStateCreateInfo>
|
|
{
|
|
using Type = PipelineTessellationDomainOriginStateCreateInfo;
|
|
};
|
|
using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo;
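  // Usage sketch (illustrative): switching the tessellation domain origin to the lower-left
  // convention (core since Vulkan 1.1), chained into the application's
  // vk::PipelineTessellationStateCreateInfo (`tessellationState` below).
  //
  //   vk::PipelineTessellationDomainOriginStateCreateInfo domainOriginInfo;
  //   domainOriginInfo.setDomainOrigin( vk::TessellationDomainOrigin::eLowerLeft );
  //   tessellationState.setPNext( &domainOriginInfo );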
struct VertexInputBindingDivisorDescriptionEXT
|
|
{
|
|
using NativeType = VkVertexInputBindingDivisorDescriptionEXT;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT(uint32_t binding_ = {}, uint32_t divisor_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: binding( binding_ ), divisor( divisor_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT( VertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VertexInputBindingDivisorDescriptionEXT( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VertexInputBindingDivisorDescriptionEXT( *reinterpret_cast<VertexInputBindingDivisorDescriptionEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VertexInputBindingDivisorDescriptionEXT & operator=( VertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VertexInputBindingDivisorDescriptionEXT & operator=( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescriptionEXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
binding = binding_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescriptionEXT & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
divisor = divisor_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVertexInputBindingDivisorDescriptionEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVertexInputBindingDivisorDescriptionEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVertexInputBindingDivisorDescriptionEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVertexInputBindingDivisorDescriptionEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( binding, divisor );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VertexInputBindingDivisorDescriptionEXT const & ) const = default;
|
|
#else
|
|
bool operator==( VertexInputBindingDivisorDescriptionEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( binding == rhs.binding )
|
|
&& ( divisor == rhs.divisor );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VertexInputBindingDivisorDescriptionEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t binding = {};
|
|
uint32_t divisor = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT ) == sizeof( VkVertexInputBindingDivisorDescriptionEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT>::value, "VertexInputBindingDivisorDescriptionEXT is not nothrow_move_constructible!" );
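  // Usage sketch (illustrative): one divisor description per instanced binding, consumed through
  // pVertexBindingDivisors of PipelineVertexInputDivisorStateCreateInfoEXT below
  // (VK_EXT_vertex_attribute_divisor).  A divisor of 4 advances per-instance data every fourth instance.
  //
  //   vk::VertexInputBindingDivisorDescriptionEXT divisor;
  //   divisor.setBinding( 1 ).setDivisor( 4 );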
struct PipelineVertexInputDivisorStateCreateInfoEXT
|
|
{
|
|
using NativeType = VkPipelineVertexInputDivisorStateCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT(uint32_t vertexBindingDivisorCount_ = {}, const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: vertexBindingDivisorCount( vertexBindingDivisorCount_ ), pVertexBindingDivisors( pVertexBindingDivisors_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineVertexInputDivisorStateCreateInfoEXT( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineVertexInputDivisorStateCreateInfoEXT( *reinterpret_cast<PipelineVertexInputDivisorStateCreateInfoEXT const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineVertexInputDivisorStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT> const & vertexBindingDivisors_ )
|
|
: vertexBindingDivisorCount( static_cast<uint32_t>( vertexBindingDivisors_.size() ) ), pVertexBindingDivisors( vertexBindingDivisors_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineVertexInputDivisorStateCreateInfoEXT & operator=( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineVertexInputDivisorStateCreateInfoEXT & operator=( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoEXT & setVertexBindingDivisorCount( uint32_t vertexBindingDivisorCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
vertexBindingDivisorCount = vertexBindingDivisorCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoEXT & setPVertexBindingDivisors( const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pVertexBindingDivisors = pVertexBindingDivisors_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineVertexInputDivisorStateCreateInfoEXT & setVertexBindingDivisors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT> const & vertexBindingDivisors_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
vertexBindingDivisorCount = static_cast<uint32_t>( vertexBindingDivisors_.size() );
|
|
pVertexBindingDivisors = vertexBindingDivisors_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineVertexInputDivisorStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineVertexInputDivisorStateCreateInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineVertexInputDivisorStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineVertexInputDivisorStateCreateInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, vertexBindingDivisorCount, pVertexBindingDivisors );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineVertexInputDivisorStateCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( vertexBindingDivisorCount == rhs.vertexBindingDivisorCount )
|
|
&& ( pVertexBindingDivisors == rhs.pVertexBindingDivisors );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT;
|
|
const void * pNext = {};
|
|
uint32_t vertexBindingDivisorCount = {};
|
|
const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT ) == sizeof( VkPipelineVertexInputDivisorStateCreateInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT>::value, "PipelineVertexInputDivisorStateCreateInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT>
|
|
{
|
|
using Type = PipelineVertexInputDivisorStateCreateInfoEXT;
|
|
};
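  // Usage sketch (illustrative only; example values and the default "vk" namespace are assumptions):
  // PipelineVertexInputDivisorStateCreateInfoEXT is chained into PipelineVertexInputStateCreateInfo
  // via pNext when VK_EXT_vertex_attribute_divisor is enabled.
  //
  //   vk::VertexInputBindingDivisorDescriptionEXT divisor( /*binding*/ 1, /*divisor*/ 4 );
  //   vk::PipelineVertexInputDivisorStateCreateInfoEXT divisorState( 1, &divisor );
  //   vk::PipelineVertexInputStateCreateInfo vertexInput;
  //   vertexInput.setPNext( &divisorState );  // binding 1 advances once every 4 instances
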
struct PipelineViewportCoarseSampleOrderStateCreateInfoNV
|
|
{
|
|
using NativeType = VkPipelineViewportCoarseSampleOrderStateCreateInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV(VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault, uint32_t customSampleOrderCount_ = {}, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: sampleOrderType( sampleOrderType_ ), customSampleOrderCount( customSampleOrderCount_ ), pCustomSampleOrders( pCustomSampleOrders_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineViewportCoarseSampleOrderStateCreateInfoNV( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineViewportCoarseSampleOrderStateCreateInfoNV( *reinterpret_cast<PipelineViewportCoarseSampleOrderStateCreateInfoNV const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineViewportCoarseSampleOrderStateCreateInfoNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders_ )
|
|
: sampleOrderType( sampleOrderType_ ), customSampleOrderCount( static_cast<uint32_t>( customSampleOrders_.size() ) ), pCustomSampleOrders( customSampleOrders_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & setSampleOrderType( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sampleOrderType = sampleOrderType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & setCustomSampleOrderCount( uint32_t customSampleOrderCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
customSampleOrderCount = customSampleOrderCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPCustomSampleOrders( const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pCustomSampleOrders = pCustomSampleOrders_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineViewportCoarseSampleOrderStateCreateInfoNV & setCustomSampleOrders( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
customSampleOrderCount = static_cast<uint32_t>( customSampleOrders_.size() );
|
|
pCustomSampleOrders = customSampleOrders_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineViewportCoarseSampleOrderStateCreateInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineViewportCoarseSampleOrderStateCreateInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, sampleOrderType, customSampleOrderCount, pCustomSampleOrders );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( sampleOrderType == rhs.sampleOrderType )
|
|
&& ( customSampleOrderCount == rhs.customSampleOrderCount )
|
|
&& ( pCustomSampleOrders == rhs.pCustomSampleOrders );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault;
|
|
uint32_t customSampleOrderCount = {};
|
|
const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV ) == sizeof( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV>::value, "PipelineViewportCoarseSampleOrderStateCreateInfoNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV>
|
|
{
|
|
using Type = PipelineViewportCoarseSampleOrderStateCreateInfoNV;
|
|
};
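  // Usage sketch (illustrative only; the default "vk" namespace is assumed): the coarse sample
  // order state is chained into PipelineViewportStateCreateInfo via pNext when
  // VK_NV_shading_rate_image is enabled. Passing eDefault with no custom orders keeps the
  // implementation-defined sample ordering.
  //
  //   vk::PipelineViewportCoarseSampleOrderStateCreateInfoNV sampleOrderState(
  //     vk::CoarseSampleOrderTypeNV::eDefault, 0, nullptr );
  //   vk::PipelineViewportStateCreateInfo viewportState;
  //   viewportState.setPNext( &sampleOrderState );
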
  struct PipelineViewportDepthClipControlCreateInfoEXT
  {
    using NativeType = VkPipelineViewportDepthClipControlCreateInfoEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportDepthClipControlCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineViewportDepthClipControlCreateInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne_ = {}) VULKAN_HPP_NOEXCEPT
    : negativeOneToOne( negativeOneToOne_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineViewportDepthClipControlCreateInfoEXT( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineViewportDepthClipControlCreateInfoEXT( VkPipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineViewportDepthClipControlCreateInfoEXT( *reinterpret_cast<PipelineViewportDepthClipControlCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineViewportDepthClipControlCreateInfoEXT & operator=( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineViewportDepthClipControlCreateInfoEXT & operator=( VkPipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClipControlCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClipControlCreateInfoEXT & setNegativeOneToOne( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne_ ) VULKAN_HPP_NOEXCEPT
    {
      negativeOneToOne = negativeOneToOne_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/


    explicit operator VkPipelineViewportDepthClipControlCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineViewportDepthClipControlCreateInfoEXT*>( this );
    }

    explicit operator VkPipelineViewportDepthClipControlCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineViewportDepthClipControlCreateInfoEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, negativeOneToOne );
    }
#endif


#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PipelineViewportDepthClipControlCreateInfoEXT const & ) const = default;
#else
    bool operator==( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( negativeOneToOne == rhs.negativeOneToOne );
#endif
    }

    bool operator!=( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportDepthClipControlCreateInfoEXT;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT ) == sizeof( VkPipelineViewportDepthClipControlCreateInfoEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT>::value, "PipelineViewportDepthClipControlCreateInfoEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineViewportDepthClipControlCreateInfoEXT>
  {
    using Type = PipelineViewportDepthClipControlCreateInfoEXT;
  };
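  // Usage sketch (illustrative only; the default "vk" namespace is assumed): setting
  // negativeOneToOne to VK_TRUE selects an OpenGL-style [-1, 1] clip-space depth range.
  // The structure is chained into PipelineViewportStateCreateInfo via pNext and requires
  // VK_EXT_depth_clip_control.
  //
  //   vk::PipelineViewportDepthClipControlCreateInfoEXT depthClipControl( VK_TRUE );
  //   vk::PipelineViewportStateCreateInfo viewportState;
  //   viewportState.setPNext( &depthClipControl );
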
struct PipelineViewportExclusiveScissorStateCreateInfoNV
|
|
{
|
|
using NativeType = VkPipelineViewportExclusiveScissorStateCreateInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV(uint32_t exclusiveScissorCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: exclusiveScissorCount( exclusiveScissorCount_ ), pExclusiveScissors( pExclusiveScissors_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineViewportExclusiveScissorStateCreateInfoNV( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineViewportExclusiveScissorStateCreateInfoNV( *reinterpret_cast<PipelineViewportExclusiveScissorStateCreateInfoNV const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineViewportExclusiveScissorStateCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors_ )
|
|
: exclusiveScissorCount( static_cast<uint32_t>( exclusiveScissors_.size() ) ), pExclusiveScissors( exclusiveScissors_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineViewportExclusiveScissorStateCreateInfoNV & operator=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineViewportExclusiveScissorStateCreateInfoNV & operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissorCount( uint32_t exclusiveScissorCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
exclusiveScissorCount = exclusiveScissorCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & setPExclusiveScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pExclusiveScissors = pExclusiveScissors_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
exclusiveScissorCount = static_cast<uint32_t>( exclusiveScissors_.size() );
|
|
pExclusiveScissors = exclusiveScissors_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineViewportExclusiveScissorStateCreateInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineViewportExclusiveScissorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineViewportExclusiveScissorStateCreateInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Rect2D * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, exclusiveScissorCount, pExclusiveScissors );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineViewportExclusiveScissorStateCreateInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( exclusiveScissorCount == rhs.exclusiveScissorCount )
|
|
&& ( pExclusiveScissors == rhs.pExclusiveScissors );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV;
|
|
const void * pNext = {};
|
|
uint32_t exclusiveScissorCount = {};
|
|
const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV ) == sizeof( VkPipelineViewportExclusiveScissorStateCreateInfoNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV>::value, "PipelineViewportExclusiveScissorStateCreateInfoNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV>
|
|
{
|
|
using Type = PipelineViewportExclusiveScissorStateCreateInfoNV;
|
|
};
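  // Usage sketch (illustrative only; the rectangle is an arbitrary example value and the default
  // "vk" namespace is assumed): exclusive scissors are provided per viewport and chained into
  // PipelineViewportStateCreateInfo via pNext when VK_NV_scissor_exclusive is enabled.
  //
  //   vk::Rect2D exclusiveRect( vk::Offset2D( 0, 0 ), vk::Extent2D( 256, 256 ) );
  //   vk::PipelineViewportExclusiveScissorStateCreateInfoNV exclusiveScissorState( 1, &exclusiveRect );
  //   vk::PipelineViewportStateCreateInfo viewportState;
  //   viewportState.setPNext( &exclusiveScissorState );
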
struct ShadingRatePaletteNV
|
|
{
|
|
using NativeType = VkShadingRatePaletteNV;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV(uint32_t shadingRatePaletteEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ ), pShadingRatePaletteEntries( pShadingRatePaletteEntries_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ShadingRatePaletteNV( *reinterpret_cast<ShadingRatePaletteNV const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ShadingRatePaletteNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV> const & shadingRatePaletteEntries_ )
|
|
: shadingRatePaletteEntryCount( static_cast<uint32_t>( shadingRatePaletteEntries_.size() ) ), pShadingRatePaletteEntries( shadingRatePaletteEntries_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ShadingRatePaletteNV & operator=( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ShadingRatePaletteNV & operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV & setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV & setPShadingRatePaletteEntries( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pShadingRatePaletteEntries = pShadingRatePaletteEntries_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ShadingRatePaletteNV & setShadingRatePaletteEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV> const & shadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shadingRatePaletteEntryCount = static_cast<uint32_t>( shadingRatePaletteEntries_.size() );
|
|
pShadingRatePaletteEntries = shadingRatePaletteEntries_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkShadingRatePaletteNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkShadingRatePaletteNV*>( this );
|
|
}
|
|
|
|
explicit operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkShadingRatePaletteNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( shadingRatePaletteEntryCount, pShadingRatePaletteEntries );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ShadingRatePaletteNV const & ) const = default;
|
|
#else
|
|
bool operator==( ShadingRatePaletteNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount )
|
|
&& ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ShadingRatePaletteNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t shadingRatePaletteEntryCount = {};
|
|
const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV>::value, "ShadingRatePaletteNV is not nothrow_move_constructible!" );
|
|
|
|
struct PipelineViewportShadingRateImageStateCreateInfoNV
|
|
{
|
|
using NativeType = VkPipelineViewportShadingRateImageStateCreateInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: shadingRateImageEnable( shadingRateImageEnable_ ), viewportCount( viewportCount_ ), pShadingRatePalettes( pShadingRatePalettes_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineViewportShadingRateImageStateCreateInfoNV( *reinterpret_cast<PipelineViewportShadingRateImageStateCreateInfoNV const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineViewportShadingRateImageStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes_ )
|
|
: shadingRateImageEnable( shadingRateImageEnable_ ), viewportCount( static_cast<uint32_t>( shadingRatePalettes_.size() ) ), pShadingRatePalettes( shadingRatePalettes_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineViewportShadingRateImageStateCreateInfoNV & operator=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineViewportShadingRateImageStateCreateInfoNV & operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shadingRateImageEnable = shadingRateImageEnable_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewportCount = viewportCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setPShadingRatePalettes( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pShadingRatePalettes = pShadingRatePalettes_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRatePalettes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewportCount = static_cast<uint32_t>( shadingRatePalettes_.size() );
|
|
pShadingRatePalettes = shadingRatePalettes_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineViewportShadingRateImageStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineViewportShadingRateImageStateCreateInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineViewportShadingRateImageStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineViewportShadingRateImageStateCreateInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, shadingRateImageEnable, viewportCount, pShadingRatePalettes );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineViewportShadingRateImageStateCreateInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( shadingRateImageEnable == rhs.shadingRateImageEnable )
|
|
&& ( viewportCount == rhs.viewportCount )
|
|
&& ( pShadingRatePalettes == rhs.pShadingRatePalettes );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable = {};
|
|
uint32_t viewportCount = {};
|
|
const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV ) == sizeof( VkPipelineViewportShadingRateImageStateCreateInfoNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV>::value, "PipelineViewportShadingRateImageStateCreateInfoNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV>
|
|
{
|
|
using Type = PipelineViewportShadingRateImageStateCreateInfoNV;
|
|
};
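  // Usage sketch (illustrative only; the palette entry is an example value and the default "vk"
  // namespace is assumed): one ShadingRatePaletteNV is supplied per viewport, and the state is
  // chained into PipelineViewportStateCreateInfo via pNext when VK_NV_shading_rate_image is enabled.
  //
  //   vk::ShadingRatePaletteEntryNV entry = vk::ShadingRatePaletteEntryNV::e1InvocationPerPixel;
  //   vk::ShadingRatePaletteNV palette( 1, &entry );
  //   vk::PipelineViewportShadingRateImageStateCreateInfoNV shadingRateState( VK_TRUE, 1, &palette );
  //   vk::PipelineViewportStateCreateInfo viewportState;
  //   viewportState.setPNext( &shadingRateState );
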
struct ViewportSwizzleNV
|
|
{
|
|
using NativeType = VkViewportSwizzleNV;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ViewportSwizzleNV(VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX) VULKAN_HPP_NOEXCEPT
|
|
: x( x_ ), y( y_ ), z( z_ ), w( w_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ViewportSwizzleNV( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ViewportSwizzleNV( *reinterpret_cast<ViewportSwizzleNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ViewportSwizzleNV & operator=( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ViewportSwizzleNV & operator=( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setX( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
x = x_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setY( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
y = y_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setZ( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
z = z_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setW( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
w = w_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkViewportSwizzleNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkViewportSwizzleNV*>( this );
|
|
}
|
|
|
|
explicit operator VkViewportSwizzleNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkViewportSwizzleNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV const &, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV const &, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV const &, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( x, y, z, w );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ViewportSwizzleNV const & ) const = default;
|
|
#else
|
|
bool operator==( ViewportSwizzleNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( x == rhs.x )
|
|
&& ( y == rhs.y )
|
|
&& ( z == rhs.z )
|
|
&& ( w == rhs.w );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ViewportSwizzleNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
|
|
VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
|
|
VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
|
|
VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV>::value, "ViewportSwizzleNV is not nothrow_move_constructible!" );
|
|
|
|
struct PipelineViewportSwizzleStateCreateInfoNV
|
|
{
|
|
using NativeType = VkPipelineViewportSwizzleStateCreateInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), viewportCount( viewportCount_ ), pViewportSwizzles( pViewportSwizzles_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineViewportSwizzleStateCreateInfoNV( *reinterpret_cast<PipelineViewportSwizzleStateCreateInfoNV const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineViewportSwizzleStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles_ )
|
|
: flags( flags_ ), viewportCount( static_cast<uint32_t>( viewportSwizzles_.size() ) ), pViewportSwizzles( viewportSwizzles_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineViewportSwizzleStateCreateInfoNV & operator=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineViewportSwizzleStateCreateInfoNV & operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewportCount = viewportCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setPViewportSwizzles( const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pViewportSwizzles = pViewportSwizzles_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineViewportSwizzleStateCreateInfoNV & setViewportSwizzles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewportCount = static_cast<uint32_t>( viewportSwizzles_.size() );
|
|
pViewportSwizzles = viewportSwizzles_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineViewportSwizzleStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineViewportSwizzleStateCreateInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineViewportSwizzleStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineViewportSwizzleStateCreateInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, viewportCount, pViewportSwizzles );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineViewportSwizzleStateCreateInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( viewportCount == rhs.viewportCount )
|
|
&& ( pViewportSwizzles == rhs.pViewportSwizzles );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags = {};
|
|
uint32_t viewportCount = {};
|
|
const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV ) == sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV>::value, "PipelineViewportSwizzleStateCreateInfoNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineViewportSwizzleStateCreateInfoNV>
|
|
{
|
|
using Type = PipelineViewportSwizzleStateCreateInfoNV;
|
|
};
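  // Usage sketch (illustrative only; the identity-like swizzle is an example and the default "vk"
  // namespace is assumed): one ViewportSwizzleNV is supplied per viewport, and the state is chained
  // into PipelineViewportStateCreateInfo via pNext when VK_NV_viewport_swizzle is enabled.
  //
  //   vk::ViewportSwizzleNV swizzle( vk::ViewportCoordinateSwizzleNV::ePositiveX,
  //                                  vk::ViewportCoordinateSwizzleNV::ePositiveY,
  //                                  vk::ViewportCoordinateSwizzleNV::ePositiveZ,
  //                                  vk::ViewportCoordinateSwizzleNV::ePositiveW );
  //   vk::PipelineViewportSwizzleStateCreateInfoNV swizzleState( {}, 1, &swizzle );
  //   vk::PipelineViewportStateCreateInfo viewportState;
  //   viewportState.setPNext( &swizzleState );
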
  struct ViewportWScalingNV
  {
    using NativeType = VkViewportWScalingNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ViewportWScalingNV(float xcoeff_ = {}, float ycoeff_ = {}) VULKAN_HPP_NOEXCEPT
    : xcoeff( xcoeff_ ), ycoeff( ycoeff_ )
    {}

    VULKAN_HPP_CONSTEXPR ViewportWScalingNV( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ViewportWScalingNV( *reinterpret_cast<ViewportWScalingNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    ViewportWScalingNV & operator=( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ViewportWScalingNV & operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportWScalingNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV & setXcoeff( float xcoeff_ ) VULKAN_HPP_NOEXCEPT
    {
      xcoeff = xcoeff_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV & setYcoeff( float ycoeff_ ) VULKAN_HPP_NOEXCEPT
    {
      ycoeff = ycoeff_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/


    explicit operator VkViewportWScalingNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkViewportWScalingNV*>( this );
    }

    explicit operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkViewportWScalingNV*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<float const &, float const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( xcoeff, ycoeff );
    }
#endif


#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( ViewportWScalingNV const & ) const = default;
#else
    bool operator==( ViewportWScalingNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( xcoeff == rhs.xcoeff )
          && ( ycoeff == rhs.ycoeff );
#endif
    }

    bool operator!=( ViewportWScalingNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

    public:
    float xcoeff = {};
    float ycoeff = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ViewportWScalingNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ViewportWScalingNV>::value, "ViewportWScalingNV is not nothrow_move_constructible!" );

struct PipelineViewportWScalingStateCreateInfoNV
|
|
{
|
|
using NativeType = VkPipelineViewportWScalingStateCreateInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportWScalingStateCreateInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: viewportWScalingEnable( viewportWScalingEnable_ ), viewportCount( viewportCount_ ), pViewportWScalings( pViewportWScalings_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PipelineViewportWScalingStateCreateInfoNV( *reinterpret_cast<PipelineViewportWScalingStateCreateInfoNV const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineViewportWScalingStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings_ )
|
|
: viewportWScalingEnable( viewportWScalingEnable_ ), viewportCount( static_cast<uint32_t>( viewportWScalings_.size() ) ), pViewportWScalings( viewportWScalings_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PipelineViewportWScalingStateCreateInfoNV & operator=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PipelineViewportWScalingStateCreateInfoNV & operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & setViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewportWScalingEnable = viewportWScalingEnable_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewportCount = viewportCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & setPViewportWScalings( const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pViewportWScalings = pViewportWScalings_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PipelineViewportWScalingStateCreateInfoNV & setViewportWScalings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewportCount = static_cast<uint32_t>( viewportWScalings_.size() );
|
|
pViewportWScalings = viewportWScalings_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPipelineViewportWScalingStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPipelineViewportWScalingStateCreateInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkPipelineViewportWScalingStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPipelineViewportWScalingStateCreateInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, viewportWScalingEnable, viewportCount, pViewportWScalings );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PipelineViewportWScalingStateCreateInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( PipelineViewportWScalingStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( viewportWScalingEnable == rhs.viewportWScalingEnable )
|
|
&& ( viewportCount == rhs.viewportCount )
|
|
&& ( pViewportWScalings == rhs.pViewportWScalings );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable = {};
|
|
uint32_t viewportCount = {};
|
|
const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV ) == sizeof( VkPipelineViewportWScalingStateCreateInfoNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV>::value, "PipelineViewportWScalingStateCreateInfoNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::ePipelineViewportWScalingStateCreateInfoNV>
|
|
{
|
|
using Type = PipelineViewportWScalingStateCreateInfoNV;
|
|
};
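  // Usage sketch (illustrative only; the coefficients are example values and the default "vk"
  // namespace is assumed): W-scaling coefficients are supplied per viewport, and the state is
  // chained into PipelineViewportStateCreateInfo via pNext when VK_NV_clip_space_w_scaling is enabled.
  //
  //   vk::ViewportWScalingNV wScaling( /*xcoeff*/ 0.5f, /*ycoeff*/ 0.5f );
  //   vk::PipelineViewportWScalingStateCreateInfoNV wScalingState( VK_TRUE, 1, &wScaling );
  //   vk::PipelineViewportStateCreateInfo viewportState;
  //   viewportState.setPNext( &wScalingState );
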
#if defined( VK_USE_PLATFORM_GGP )
|
|
struct PresentFrameTokenGGP
|
|
{
|
|
using NativeType = VkPresentFrameTokenGGP;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentFrameTokenGGP;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP(GgpFrameToken frameToken_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: frameToken( frameToken_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PresentFrameTokenGGP( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PresentFrameTokenGGP( *reinterpret_cast<PresentFrameTokenGGP const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PresentFrameTokenGGP & operator=( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PresentFrameTokenGGP & operator=( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PresentFrameTokenGGP & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PresentFrameTokenGGP & setFrameToken( GgpFrameToken frameToken_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
frameToken = frameToken_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPresentFrameTokenGGP const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPresentFrameTokenGGP*>( this );
|
|
}
|
|
|
|
explicit operator VkPresentFrameTokenGGP &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPresentFrameTokenGGP*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, GgpFrameToken const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, frameToken );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
std::strong_ordering operator<=>( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
|
|
if ( auto cmp = memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ) == 0 );
|
|
}
|
|
|
|
bool operator!=( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentFrameTokenGGP;
|
|
const void * pNext = {};
|
|
GgpFrameToken frameToken = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP ) == sizeof( VkPresentFrameTokenGGP ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP>::value, "PresentFrameTokenGGP is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePresentFrameTokenGGP>
  {
    using Type = PresentFrameTokenGGP;
  };
#endif /*VK_USE_PLATFORM_GGP*/

struct PresentIdKHR
|
|
{
|
|
using NativeType = VkPresentIdKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentIdKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PresentIdKHR(uint32_t swapchainCount_ = {}, const uint64_t * pPresentIds_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: swapchainCount( swapchainCount_ ), pPresentIds( pPresentIds_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PresentIdKHR( PresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PresentIdKHR( VkPresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PresentIdKHR( *reinterpret_cast<PresentIdKHR const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PresentIdKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & presentIds_ )
|
|
: swapchainCount( static_cast<uint32_t>( presentIds_.size() ) ), pPresentIds( presentIds_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PresentIdKHR & operator=( PresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PresentIdKHR & operator=( VkPresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentIdKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PresentIdKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PresentIdKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
swapchainCount = swapchainCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PresentIdKHR & setPPresentIds( const uint64_t * pPresentIds_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pPresentIds = pPresentIds_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PresentIdKHR & setPresentIds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & presentIds_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
swapchainCount = static_cast<uint32_t>( presentIds_.size() );
|
|
pPresentIds = presentIds_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPresentIdKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPresentIdKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkPresentIdKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPresentIdKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint64_t * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, swapchainCount, pPresentIds );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PresentIdKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PresentIdKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( swapchainCount == rhs.swapchainCount )
|
|
&& ( pPresentIds == rhs.pPresentIds );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PresentIdKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentIdKHR;
|
|
const void * pNext = {};
|
|
uint32_t swapchainCount = {};
|
|
const uint64_t * pPresentIds = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentIdKHR ) == sizeof( VkPresentIdKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentIdKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentIdKHR>::value, "PresentIdKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePresentIdKHR>
  {
    using Type = PresentIdKHR;
  };
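
  // Usage sketch (comment only, not part of the generated header): with VK_KHR_present_id,
  // a PresentIdKHR is chained into PresentInfoKHR::pNext so each presented image carries an
  // application-chosen id. Assumes the enhanced-mode ArrayProxy setters are available;
  // `presentInfo` and `presentId` are illustrative names.
  //
  //   uint64_t presentId = 42;                           // one id per swapchain in presentInfo
  //   VULKAN_HPP_NAMESPACE::PresentIdKHR presentIdInfo;
  //   presentIdInfo.setPresentIds( presentId );          // swapchainCount = 1, pPresentIds = &presentId
  //   presentInfo.setPNext( &presentIdInfo );            // presentInfo is the PresentInfoKHR being presented
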
struct PresentInfoKHR
|
|
{
|
|
using NativeType = VkPresentInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PresentInfoKHR(uint32_t waitSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_ = {}, const uint32_t * pImageIndices_ = {}, VULKAN_HPP_NAMESPACE::Result * pResults_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: waitSemaphoreCount( waitSemaphoreCount_ ), pWaitSemaphores( pWaitSemaphores_ ), swapchainCount( swapchainCount_ ), pSwapchains( pSwapchains_ ), pImageIndices( pImageIndices_ ), pResults( pResults_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PresentInfoKHR( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PresentInfoKHR( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PresentInfoKHR( *reinterpret_cast<PresentInfoKHR const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & imageIndices_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::Result> const & results_ = {} )
|
|
: waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) ), pWaitSemaphores( waitSemaphores_.data() ), swapchainCount( static_cast<uint32_t>( swapchains_.size() ) ), pSwapchains( swapchains_.data() ), pImageIndices( imageIndices_.data() ), pResults( results_.data() )
|
|
{
|
|
#ifdef VULKAN_HPP_NO_EXCEPTIONS
|
|
VULKAN_HPP_ASSERT( swapchains_.size() == imageIndices_.size() );
|
|
VULKAN_HPP_ASSERT( results_.empty() || ( swapchains_.size() == results_.size() ) );
|
|
VULKAN_HPP_ASSERT( results_.empty() || ( imageIndices_.size() == results_.size() ) );
|
|
#else
|
|
if ( swapchains_.size() != imageIndices_.size() )
|
|
{
|
|
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::PresentInfoKHR::PresentInfoKHR: swapchains_.size() != imageIndices_.size()" );
|
|
}
|
|
if ( !results_.empty() && ( swapchains_.size() != results_.size() ) )
|
|
{
|
|
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( swapchains_.size() != results_.size() )" );
|
|
}
|
|
if ( !results_.empty() && ( imageIndices_.size() != results_.size() ) )
|
|
{
|
|
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( imageIndices_.size() != results_.size() )" );
|
|
}
|
|
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PresentInfoKHR & operator=( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PresentInfoKHR & operator=( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
waitSemaphoreCount = waitSemaphoreCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pWaitSemaphores = pWaitSemaphores_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PresentInfoKHR & setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
|
|
pWaitSemaphores = waitSemaphores_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
swapchainCount = swapchainCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPSwapchains( const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSwapchains = pSwapchains_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PresentInfoKHR & setSwapchains( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
swapchainCount = static_cast<uint32_t>( swapchains_.size() );
|
|
pSwapchains = swapchains_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPImageIndices( const uint32_t * pImageIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pImageIndices = pImageIndices_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PresentInfoKHR & setImageIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & imageIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
swapchainCount = static_cast<uint32_t>( imageIndices_.size() );
|
|
pImageIndices = imageIndices_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPResults( VULKAN_HPP_NAMESPACE::Result * pResults_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pResults = pResults_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PresentInfoKHR & setResults( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::Result> const & results_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
swapchainCount = static_cast<uint32_t>( results_.size() );
|
|
pResults = results_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPresentInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPresentInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Semaphore * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SwapchainKHR * const &, const uint32_t * const &, VULKAN_HPP_NAMESPACE::Result * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, waitSemaphoreCount, pWaitSemaphores, swapchainCount, pSwapchains, pImageIndices, pResults );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PresentInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( waitSemaphoreCount == rhs.waitSemaphoreCount )
|
|
&& ( pWaitSemaphores == rhs.pWaitSemaphores )
|
|
&& ( swapchainCount == rhs.swapchainCount )
|
|
&& ( pSwapchains == rhs.pSwapchains )
|
|
&& ( pImageIndices == rhs.pImageIndices )
|
|
&& ( pResults == rhs.pResults );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentInfoKHR;
|
|
const void * pNext = {};
|
|
uint32_t waitSemaphoreCount = {};
|
|
const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {};
|
|
uint32_t swapchainCount = {};
|
|
const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains = {};
|
|
const uint32_t * pImageIndices = {};
|
|
VULKAN_HPP_NAMESPACE::Result * pResults = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentInfoKHR>::value, "PresentInfoKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePresentInfoKHR>
  {
    using Type = PresentInfoKHR;
  };
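
  // Usage sketch (comment only, not part of the generated header): a minimal present call
  // built with the enhanced-mode setters. Assumes a queue that supports presentation, an
  // image index returned by acquireNextImageKHR, and a semaphore signalled by the last
  // submit; all names are illustrative.
  //
  //   void presentOne( VULKAN_HPP_NAMESPACE::Queue        presentQueue,
  //                    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
  //                    VULKAN_HPP_NAMESPACE::Semaphore    renderFinished,
  //                    uint32_t                           imageIndex )
  //   {
  //     VULKAN_HPP_NAMESPACE::PresentInfoKHR presentInfo;
  //     presentInfo.setWaitSemaphores( renderFinished )   // waitSemaphoreCount = 1
  //                .setSwapchains( swapchain )            // swapchainCount = 1
  //                .setPImageIndices( &imageIndex );      // one index per swapchain
  //     VULKAN_HPP_NAMESPACE::Result result = presentQueue.presentKHR( presentInfo );
  //     (void)result;                                     // eSuccess or eSuboptimalKHR on the happy path
  //   }
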
struct RectLayerKHR
|
|
{
|
|
using NativeType = VkRectLayerKHR;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR RectLayerKHR(VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {}, uint32_t layer_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: offset( offset_ ), extent( extent_ ), layer( layer_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR RectLayerKHR( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RectLayerKHR( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RectLayerKHR( *reinterpret_cast<RectLayerKHR const *>( &rhs ) )
|
|
{}
|
|
|
|
explicit RectLayerKHR( Rect2D const & rect2D, uint32_t layer_ = {} )
|
|
: offset( rect2D.offset )
|
|
, extent( rect2D.extent )
|
|
, layer( layer_ )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RectLayerKHR & operator=( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RectLayerKHR & operator=( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RectLayerKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
offset = offset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
extent = extent_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setLayer( uint32_t layer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
layer = layer_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkRectLayerKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRectLayerKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkRectLayerKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRectLayerKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( offset, extent, layer );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( RectLayerKHR const & ) const = default;
|
|
#else
|
|
bool operator==( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( offset == rhs.offset )
|
|
&& ( extent == rhs.extent )
|
|
&& ( layer == rhs.layer );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::Offset2D offset = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D extent = {};
|
|
uint32_t layer = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RectLayerKHR ) == sizeof( VkRectLayerKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RectLayerKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RectLayerKHR>::value, "RectLayerKHR is not nothrow_move_constructible!" );
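
  // Usage sketch (comment only, not part of the generated header): a RectLayerKHR describes
  // one changed region of a swapchain image for VK_KHR_incremental_present. The explicit
  // Rect2D constructor above copies offset/extent and takes the target array layer; the
  // values here are illustrative.
  //
  //   VULKAN_HPP_NAMESPACE::Rect2D dirty( VULKAN_HPP_NAMESPACE::Offset2D( 16, 16 ),
  //                                       VULKAN_HPP_NAMESPACE::Extent2D( 256, 128 ) );
  //   VULKAN_HPP_NAMESPACE::RectLayerKHR rectLayer( dirty, 0 /*layer*/ );
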
struct PresentRegionKHR
|
|
{
|
|
using NativeType = VkPresentRegionKHR;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PresentRegionKHR(uint32_t rectangleCount_ = {}, const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: rectangleCount( rectangleCount_ ), pRectangles( pRectangles_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PresentRegionKHR( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PresentRegionKHR( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PresentRegionKHR( *reinterpret_cast<PresentRegionKHR const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PresentRegionKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RectLayerKHR> const & rectangles_ )
|
|
: rectangleCount( static_cast<uint32_t>( rectangles_.size() ) ), pRectangles( rectangles_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PresentRegionKHR & operator=( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PresentRegionKHR & operator=( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR & setRectangleCount( uint32_t rectangleCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
rectangleCount = rectangleCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR & setPRectangles( const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pRectangles = pRectangles_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PresentRegionKHR & setRectangles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RectLayerKHR> const & rectangles_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
rectangleCount = static_cast<uint32_t>( rectangles_.size() );
|
|
pRectangles = rectangles_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPresentRegionKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPresentRegionKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkPresentRegionKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPresentRegionKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, const VULKAN_HPP_NAMESPACE::RectLayerKHR * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( rectangleCount, pRectangles );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PresentRegionKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( rectangleCount == rhs.rectangleCount )
|
|
&& ( pRectangles == rhs.pRectangles );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t rectangleCount = {};
|
|
const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentRegionKHR ) == sizeof( VkPresentRegionKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentRegionKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentRegionKHR>::value, "PresentRegionKHR is not nothrow_move_constructible!" );
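
  // Usage sketch (comment only, not part of the generated header): a PresentRegionKHR simply
  // points at an array of RectLayerKHR rectangles for one swapchain. Assumes the enhanced-mode
  // ArrayProxy setters; `rects` is an illustrative name.
  //
  //   std::array<VULKAN_HPP_NAMESPACE::RectLayerKHR, 2> rects = { /* filled in by the application */ };
  //   VULKAN_HPP_NAMESPACE::PresentRegionKHR region;
  //   region.setRectangles( rects );   // rectangleCount = 2, pRectangles = rects.data()
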
struct PresentRegionsKHR
|
|
{
|
|
using NativeType = VkPresentRegionsKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentRegionsKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PresentRegionsKHR(uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: swapchainCount( swapchainCount_ ), pRegions( pRegions_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PresentRegionsKHR( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PresentRegionsKHR( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PresentRegionsKHR( *reinterpret_cast<PresentRegionsKHR const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PresentRegionsKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentRegionKHR> const & regions_ )
|
|
: swapchainCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PresentRegionsKHR & operator=( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PresentRegionsKHR & operator=( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionsKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
swapchainCount = swapchainCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setPRegions( const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pRegions = pRegions_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PresentRegionsKHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentRegionKHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
swapchainCount = static_cast<uint32_t>( regions_.size() );
|
|
pRegions = regions_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPresentRegionsKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPresentRegionsKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkPresentRegionsKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPresentRegionsKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PresentRegionKHR * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, swapchainCount, pRegions );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PresentRegionsKHR const & ) const = default;
|
|
#else
|
|
bool operator==( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( swapchainCount == rhs.swapchainCount )
|
|
&& ( pRegions == rhs.pRegions );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentRegionsKHR;
|
|
const void * pNext = {};
|
|
uint32_t swapchainCount = {};
|
|
const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentRegionsKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentRegionsKHR>::value, "PresentRegionsKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePresentRegionsKHR>
  {
    using Type = PresentRegionsKHR;
  };
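
  // Usage sketch (comment only, not part of the generated header): for
  // VK_KHR_incremental_present, a PresentRegionsKHR (one PresentRegionKHR per swapchain in
  // the present call) is chained into PresentInfoKHR::pNext. Names are illustrative and the
  // enhanced-mode setters are assumed.
  //
  //   VULKAN_HPP_NAMESPACE::PresentRegionsKHR presentRegions;
  //   presentRegions.setRegions( region );        // `region` is a PresentRegionKHR for this swapchain
  //   presentInfo.setPNext( &presentRegions );    // `presentInfo` is the PresentInfoKHR being presented
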
struct PresentTimeGOOGLE
|
|
{
|
|
using NativeType = VkPresentTimeGOOGLE;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE(uint32_t presentID_ = {}, uint64_t desiredPresentTime_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: presentID( presentID_ ), desiredPresentTime( desiredPresentTime_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PresentTimeGOOGLE( *reinterpret_cast<PresentTimeGOOGLE const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PresentTimeGOOGLE & operator=( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PresentTimeGOOGLE & operator=( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PresentTimeGOOGLE & setPresentID( uint32_t presentID_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
presentID = presentID_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PresentTimeGOOGLE & setDesiredPresentTime( uint64_t desiredPresentTime_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
desiredPresentTime = desiredPresentTime_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPresentTimeGOOGLE const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPresentTimeGOOGLE*>( this );
|
|
}
|
|
|
|
explicit operator VkPresentTimeGOOGLE &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPresentTimeGOOGLE*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, uint64_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( presentID, desiredPresentTime );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PresentTimeGOOGLE const & ) const = default;
|
|
#else
|
|
bool operator==( PresentTimeGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( presentID == rhs.presentID )
|
|
&& ( desiredPresentTime == rhs.desiredPresentTime );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PresentTimeGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t presentID = {};
|
|
uint64_t desiredPresentTime = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE ) == sizeof( VkPresentTimeGOOGLE ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE>::value, "PresentTimeGOOGLE is not nothrow_move_constructible!" );
|
|
|
|
struct PresentTimesInfoGOOGLE
|
|
{
|
|
using NativeType = VkPresentTimesInfoGOOGLE;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentTimesInfoGOOGLE;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE(uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: swapchainCount( swapchainCount_ ), pTimes( pTimes_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PresentTimesInfoGOOGLE( *reinterpret_cast<PresentTimesInfoGOOGLE const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PresentTimesInfoGOOGLE( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE> const & times_ )
|
|
: swapchainCount( static_cast<uint32_t>( times_.size() ) ), pTimes( times_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PresentTimesInfoGOOGLE & operator=( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PresentTimesInfoGOOGLE & operator=( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
swapchainCount = swapchainCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & setPTimes( const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pTimes = pTimes_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
PresentTimesInfoGOOGLE & setTimes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE> const & times_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
swapchainCount = static_cast<uint32_t>( times_.size() );
|
|
pTimes = times_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPresentTimesInfoGOOGLE const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPresentTimesInfoGOOGLE*>( this );
|
|
}
|
|
|
|
explicit operator VkPresentTimesInfoGOOGLE &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPresentTimesInfoGOOGLE*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, swapchainCount, pTimes );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PresentTimesInfoGOOGLE const & ) const = default;
|
|
#else
|
|
bool operator==( PresentTimesInfoGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( swapchainCount == rhs.swapchainCount )
|
|
&& ( pTimes == rhs.pTimes );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PresentTimesInfoGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentTimesInfoGOOGLE;
|
|
const void * pNext = {};
|
|
uint32_t swapchainCount = {};
|
|
const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE ) == sizeof( VkPresentTimesInfoGOOGLE ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE>::value, "PresentTimesInfoGOOGLE is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePresentTimesInfoGOOGLE>
  {
    using Type = PresentTimesInfoGOOGLE;
  };
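
  // Usage sketch (comment only, not part of the generated header): with
  // VK_GOOGLE_display_timing, a PresentTimesInfoGOOGLE carrying one PresentTimeGOOGLE per
  // swapchain is chained into PresentInfoKHR::pNext. The id and time below are illustrative.
  //
  //   VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE presentTime( /*presentID=*/7, /*desiredPresentTime=*/0 );
  //   VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE timesInfo;
  //   timesInfo.setTimes( presentTime );          // swapchainCount = 1, pTimes = &presentTime
  //   presentInfo.setPNext( &timesInfo );         // `presentInfo` is the PresentInfoKHR being presented
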
struct PrivateDataSlotCreateInfo
|
|
{
|
|
using NativeType = VkPrivateDataSlotCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePrivateDataSlotCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfo(VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfo( PrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PrivateDataSlotCreateInfo( VkPrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: PrivateDataSlotCreateInfo( *reinterpret_cast<PrivateDataSlotCreateInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
PrivateDataSlotCreateInfo & operator=( PrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
PrivateDataSlotCreateInfo & operator=( VkPrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkPrivateDataSlotCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkPrivateDataSlotCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkPrivateDataSlotCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkPrivateDataSlotCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( PrivateDataSlotCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( PrivateDataSlotCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( PrivateDataSlotCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePrivateDataSlotCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo ) == sizeof( VkPrivateDataSlotCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo>::value, "PrivateDataSlotCreateInfo is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePrivateDataSlotCreateInfo>
  {
    using Type = PrivateDataSlotCreateInfo;
  };
  using PrivateDataSlotCreateInfoEXT = PrivateDataSlotCreateInfo;
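
  // Usage sketch (comment only, not part of the generated header): PrivateDataSlotCreateInfo
  // currently carries no flags, so creating a slot is just default-constructing the struct and
  // handing it to the device. Device::createPrivateDataSlot is assumed to be available here
  // (Vulkan 1.3, or VK_EXT_private_data via the EXT alias); `device` is an illustrative name.
  //
  //   VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo slotCreateInfo;   // flags = {}
  //   VULKAN_HPP_NAMESPACE::PrivateDataSlot slot = device.createPrivateDataSlot( slotCreateInfo );
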
struct ProtectedSubmitInfo
|
|
{
|
|
using NativeType = VkProtectedSubmitInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eProtectedSubmitInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo(VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: protectedSubmit( protectedSubmit_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ProtectedSubmitInfo( *reinterpret_cast<ProtectedSubmitInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ProtectedSubmitInfo & operator=( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ProtectedSubmitInfo & operator=( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo & setProtectedSubmit( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
protectedSubmit = protectedSubmit_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkProtectedSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkProtectedSubmitInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkProtectedSubmitInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkProtectedSubmitInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, protectedSubmit );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ProtectedSubmitInfo const & ) const = default;
|
|
#else
|
|
bool operator==( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( protectedSubmit == rhs.protectedSubmit );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eProtectedSubmitInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo>::value, "ProtectedSubmitInfo is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eProtectedSubmitInfo>
  {
    using Type = ProtectedSubmitInfo;
  };
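
  // Usage sketch (comment only, not part of the generated header): ProtectedSubmitInfo is
  // chained into SubmitInfo::pNext to mark a queue submission as protected; `submitInfo`
  // is an illustrative name.
  //
  //   VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo protectedInfo( VK_TRUE );
  //   submitInfo.setPNext( &protectedInfo );   // `submitInfo` is the SubmitInfo passed to Queue::submit
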
struct QueryPoolCreateInfo
|
|
{
|
|
using NativeType = VkQueryPoolCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo(VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::QueryType queryType_ = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion, uint32_t queryCount_ = {}, VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), queryType( queryType_ ), queryCount( queryCount_ ), pipelineStatistics( pipelineStatistics_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: QueryPoolCreateInfo( *reinterpret_cast<QueryPoolCreateInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
QueryPoolCreateInfo & operator=( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
QueryPoolCreateInfo & operator=( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setQueryType( VULKAN_HPP_NAMESPACE::QueryType queryType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queryType = queryType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setQueryCount( uint32_t queryCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queryCount = queryCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineStatistics = pipelineStatistics_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkQueryPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkQueryPoolCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkQueryPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkQueryPoolCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags const &, VULKAN_HPP_NAMESPACE::QueryType const &, uint32_t const &, VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, queryType, queryCount, pipelineStatistics );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( QueryPoolCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( queryType == rhs.queryType )
|
|
&& ( queryCount == rhs.queryCount )
|
|
&& ( pipelineStatistics == rhs.pipelineStatistics );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::QueryType queryType = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion;
|
|
uint32_t queryCount = {};
|
|
VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo>::value, "QueryPoolCreateInfo is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eQueryPoolCreateInfo>
  {
    using Type = QueryPoolCreateInfo;
  };
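
  // Usage sketch (comment only, not part of the generated header): creating a small occlusion
  // query pool. pipelineStatistics is only consulted when queryType is ePipelineStatistics;
  // `device` is an illustrative name.
  //
  //   VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo queryPoolInfo( {},                                          // flags
  //                                                            VULKAN_HPP_NAMESPACE::QueryType::eOcclusion,
  //                                                            8 );                                         // queryCount
  //   VULKAN_HPP_NAMESPACE::QueryPool queryPool = device.createQueryPool( queryPoolInfo );
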
struct QueryPoolPerformanceCreateInfoKHR
|
|
{
|
|
using NativeType = VkQueryPoolPerformanceCreateInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceCreateInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR(uint32_t queueFamilyIndex_ = {}, uint32_t counterIndexCount_ = {}, const uint32_t * pCounterIndices_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: queueFamilyIndex( queueFamilyIndex_ ), counterIndexCount( counterIndexCount_ ), pCounterIndices( pCounterIndices_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
QueryPoolPerformanceCreateInfoKHR( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: QueryPoolPerformanceCreateInfoKHR( *reinterpret_cast<QueryPoolPerformanceCreateInfoKHR const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & counterIndices_ )
|
|
: queueFamilyIndex( queueFamilyIndex_ ), counterIndexCount( static_cast<uint32_t>( counterIndices_.size() ) ), pCounterIndices( counterIndices_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
QueryPoolPerformanceCreateInfoKHR & operator=( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
QueryPoolPerformanceCreateInfoKHR & operator=( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueFamilyIndex = queueFamilyIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setCounterIndexCount( uint32_t counterIndexCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
counterIndexCount = counterIndexCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setPCounterIndices( const uint32_t * pCounterIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pCounterIndices = pCounterIndices_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
QueryPoolPerformanceCreateInfoKHR & setCounterIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & counterIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
counterIndexCount = static_cast<uint32_t>( counterIndices_.size() );
|
|
pCounterIndices = counterIndices_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkQueryPoolPerformanceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkQueryPoolPerformanceCreateInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const uint32_t * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, queueFamilyIndex, counterIndexCount, pCounterIndices );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( QueryPoolPerformanceCreateInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( queueFamilyIndex == rhs.queueFamilyIndex )
|
|
&& ( counterIndexCount == rhs.counterIndexCount )
|
|
&& ( pCounterIndices == rhs.pCounterIndices );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceCreateInfoKHR;
|
|
const void * pNext = {};
|
|
uint32_t queueFamilyIndex = {};
|
|
uint32_t counterIndexCount = {};
|
|
const uint32_t * pCounterIndices = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR ) == sizeof( VkQueryPoolPerformanceCreateInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR>::value, "QueryPoolPerformanceCreateInfoKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eQueryPoolPerformanceCreateInfoKHR>
  {
    using Type = QueryPoolPerformanceCreateInfoKHR;
  };
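
  // Usage sketch (comment only, not part of the generated header): for VK_KHR_performance_query,
  // this struct is chained into QueryPoolCreateInfo::pNext and the pool is created with
  // queryType ePerformanceQueryKHR. Counter indices normally come from
  // vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR; the values below are illustrative.
  //
  //   std::array<uint32_t, 2> counterIndices = { 0, 1 };
  //   VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR perfInfo( /*queueFamilyIndex=*/0, counterIndices );
  //   VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo poolInfo( {}, VULKAN_HPP_NAMESPACE::QueryType::ePerformanceQueryKHR, 1 );
  //   poolInfo.setPNext( &perfInfo );
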
struct QueryPoolPerformanceQueryCreateInfoINTEL
|
|
{
|
|
using NativeType = VkQueryPoolPerformanceQueryCreateInfoINTEL;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL(VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual) VULKAN_HPP_NOEXCEPT
|
|
: performanceCountersSampling( performanceCountersSampling_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
QueryPoolPerformanceQueryCreateInfoINTEL( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: QueryPoolPerformanceQueryCreateInfoINTEL( *reinterpret_cast<QueryPoolPerformanceQueryCreateInfoINTEL const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
QueryPoolPerformanceQueryCreateInfoINTEL & operator=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
QueryPoolPerformanceQueryCreateInfoINTEL & operator=( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL & setPerformanceCountersSampling( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
performanceCountersSampling = performanceCountersSampling_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkQueryPoolPerformanceQueryCreateInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkQueryPoolPerformanceQueryCreateInfoINTEL*>( this );
|
|
}
|
|
|
|
explicit operator VkQueryPoolPerformanceQueryCreateInfoINTEL &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkQueryPoolPerformanceQueryCreateInfoINTEL*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, performanceCountersSampling );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( QueryPoolPerformanceQueryCreateInfoINTEL const & ) const = default;
|
|
#else
|
|
bool operator==( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( performanceCountersSampling == rhs.performanceCountersSampling );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual;

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL ) == sizeof( VkQueryPoolPerformanceQueryCreateInfoINTEL ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL>::value, "QueryPoolPerformanceQueryCreateInfoINTEL is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL>
  {
    using Type = QueryPoolPerformanceQueryCreateInfoINTEL;
  };
  using QueryPoolCreateInfoINTEL = QueryPoolPerformanceQueryCreateInfoINTEL;
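
  //=== Usage sketch (editorial note, not generated from the registry) ===
  // A minimal example of chaining this struct into query pool creation, assuming the default
  // "vk" namespace, exceptions enabled, a valid vk::Device named "device", and that
  // VK_INTEL_performance_query has been enabled on that device:
  //
  //   vk::StructureChain<vk::QueryPoolCreateInfo, vk::QueryPoolPerformanceQueryCreateInfoINTEL> chain(
  //     vk::QueryPoolCreateInfo{}.setQueryType( vk::QueryType::ePerformanceQueryINTEL ).setQueryCount( 1 ),
  //     vk::QueryPoolPerformanceQueryCreateInfoINTEL{}.setPerformanceCountersSampling( vk::QueryPoolSamplingModeINTEL::eManual ) );
  //   vk::QueryPool pool = device.createQueryPool( chain.get<vk::QueryPoolCreateInfo>() );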
|
|
|
|
struct QueueFamilyCheckpointProperties2NV
|
|
{
|
|
using NativeType = VkQueueFamilyCheckpointProperties2NV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyCheckpointProperties2NV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 checkpointExecutionStageMask_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: checkpointExecutionStageMask( checkpointExecutionStageMask_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
QueueFamilyCheckpointProperties2NV( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: QueueFamilyCheckpointProperties2NV( *reinterpret_cast<QueueFamilyCheckpointProperties2NV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
QueueFamilyCheckpointProperties2NV & operator=( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
QueueFamilyCheckpointProperties2NV & operator=( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkQueueFamilyCheckpointProperties2NV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkQueueFamilyCheckpointProperties2NV*>( this );
|
|
}
|
|
|
|
explicit operator VkQueueFamilyCheckpointProperties2NV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkQueueFamilyCheckpointProperties2NV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, checkpointExecutionStageMask );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( QueueFamilyCheckpointProperties2NV const & ) const = default;
|
|
#else
|
|
bool operator==( QueueFamilyCheckpointProperties2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( QueueFamilyCheckpointProperties2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointProperties2NV;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineStageFlags2 checkpointExecutionStageMask = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV ) == sizeof( VkQueueFamilyCheckpointProperties2NV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV>::value, "QueueFamilyCheckpointProperties2NV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eQueueFamilyCheckpointProperties2NV>
  {
    using Type = QueueFamilyCheckpointProperties2NV;
  };
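
  //=== Usage sketch (editorial note, not generated from the registry) ===
  // This struct is read back through the pNext chain of QueueFamilyProperties2 when
  // VK_NV_device_diagnostic_checkpoints (with synchronization2) is available. A minimal
  // sketch, assuming the default "vk" namespace and a valid vk::PhysicalDevice "physicalDevice":
  //
  //   auto chains = physicalDevice.getQueueFamilyProperties2<
  //     vk::StructureChain<vk::QueueFamilyProperties2, vk::QueueFamilyCheckpointProperties2NV>>();
  //   for ( auto const & chain : chains )
  //   {
  //     vk::PipelineStageFlags2 stages = chain.get<vk::QueueFamilyCheckpointProperties2NV>().checkpointExecutionStageMask;
  //     // "stages" lists the pipeline stages at which checkpoint markers can be reported for that family.
  //   }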
|
|
|
|
struct QueueFamilyCheckpointPropertiesNV
|
|
{
|
|
using NativeType = VkQueueFamilyCheckpointPropertiesNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyCheckpointPropertiesNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV(VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: checkpointExecutionStageMask( checkpointExecutionStageMask_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
QueueFamilyCheckpointPropertiesNV( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: QueueFamilyCheckpointPropertiesNV( *reinterpret_cast<QueueFamilyCheckpointPropertiesNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
QueueFamilyCheckpointPropertiesNV & operator=( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
QueueFamilyCheckpointPropertiesNV & operator=( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkQueueFamilyCheckpointPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkQueueFamilyCheckpointPropertiesNV*>( this );
|
|
}
|
|
|
|
explicit operator VkQueueFamilyCheckpointPropertiesNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkQueueFamilyCheckpointPropertiesNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, checkpointExecutionStageMask );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( QueueFamilyCheckpointPropertiesNV const & ) const = default;
|
|
#else
|
|
bool operator==( QueueFamilyCheckpointPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( QueueFamilyCheckpointPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointPropertiesNV;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV ) == sizeof( VkQueueFamilyCheckpointPropertiesNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV>::value, "QueueFamilyCheckpointPropertiesNV is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eQueueFamilyCheckpointPropertiesNV>
|
|
{
|
|
using Type = QueueFamilyCheckpointPropertiesNV;
|
|
};
|
|
|
|
struct QueueFamilyGlobalPriorityPropertiesKHR
|
|
{
|
|
using NativeType = VkQueueFamilyGlobalPriorityPropertiesKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyGlobalPriorityPropertiesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR(uint32_t priorityCount_ = {}, std::array<VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR,VK_MAX_GLOBAL_PRIORITY_SIZE_KHR> const & priorities_ = { { VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow } }) VULKAN_HPP_NOEXCEPT
|
|
: priorityCount( priorityCount_ ), priorities( priorities_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
QueueFamilyGlobalPriorityPropertiesKHR( VkQueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: QueueFamilyGlobalPriorityPropertiesKHR( *reinterpret_cast<QueueFamilyGlobalPriorityPropertiesKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
QueueFamilyGlobalPriorityPropertiesKHR & operator=( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
QueueFamilyGlobalPriorityPropertiesKHR & operator=( VkQueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR & setPriorityCount( uint32_t priorityCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
priorityCount = priorityCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR & setPriorities( std::array<VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR,VK_MAX_GLOBAL_PRIORITY_SIZE_KHR> priorities_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
priorities = priorities_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkQueueFamilyGlobalPriorityPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkQueueFamilyGlobalPriorityPropertiesKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkQueueFamilyGlobalPriorityPropertiesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkQueueFamilyGlobalPriorityPropertiesKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR, VK_MAX_GLOBAL_PRIORITY_SIZE_KHR> const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, priorityCount, priorities );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( QueueFamilyGlobalPriorityPropertiesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( priorityCount == rhs.priorityCount )
|
|
&& ( priorities == rhs.priorities );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyGlobalPriorityPropertiesKHR;
    void * pNext = {};
    uint32_t priorityCount = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR, VK_MAX_GLOBAL_PRIORITY_SIZE_KHR> priorities = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR ) == sizeof( VkQueueFamilyGlobalPriorityPropertiesKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR>::value, "QueueFamilyGlobalPriorityPropertiesKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eQueueFamilyGlobalPriorityPropertiesKHR>
  {
    using Type = QueueFamilyGlobalPriorityPropertiesKHR;
  };
  using QueueFamilyGlobalPriorityPropertiesEXT = QueueFamilyGlobalPriorityPropertiesKHR;
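
  //=== Usage sketch (editorial note, not generated from the registry) ===
  // Like the checkpoint properties above, this struct is filled in through the pNext chain of
  // QueueFamilyProperties2 when VK_KHR_global_priority (or its EXT alias) is supported. A minimal
  // sketch, assuming the default "vk" namespace and a vk::PhysicalDevice named "physicalDevice":
  //
  //   auto chains = physicalDevice.getQueueFamilyProperties2<
  //     vk::StructureChain<vk::QueueFamilyProperties2, vk::QueueFamilyGlobalPriorityPropertiesKHR>>();
  //   for ( auto const & chain : chains )
  //   {
  //     auto const & gp = chain.get<vk::QueueFamilyGlobalPriorityPropertiesKHR>();
  //     for ( uint32_t i = 0; i < gp.priorityCount; ++i )
  //     {
  //       vk::QueueGlobalPriorityKHR priority = gp.priorities[i];  // supported priorities, e.g. eLow .. eRealtime
  //     }
  //   }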
|
|
|
|
struct QueueFamilyProperties
|
|
{
|
|
using NativeType = VkQueueFamilyProperties;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR QueueFamilyProperties(VULKAN_HPP_NAMESPACE::QueueFlags queueFlags_ = {}, uint32_t queueCount_ = {}, uint32_t timestampValidBits_ = {}, VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: queueFlags( queueFlags_ ), queueCount( queueCount_ ), timestampValidBits( timestampValidBits_ ), minImageTransferGranularity( minImageTransferGranularity_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR QueueFamilyProperties( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
QueueFamilyProperties( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: QueueFamilyProperties( *reinterpret_cast<QueueFamilyProperties const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
QueueFamilyProperties & operator=( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
QueueFamilyProperties & operator=( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkQueueFamilyProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkQueueFamilyProperties*>( this );
|
|
}
|
|
|
|
explicit operator VkQueueFamilyProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkQueueFamilyProperties*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::QueueFlags const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( queueFlags, queueCount, timestampValidBits, minImageTransferGranularity );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( QueueFamilyProperties const & ) const = default;
|
|
#else
|
|
bool operator==( QueueFamilyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( queueFlags == rhs.queueFlags )
|
|
&& ( queueCount == rhs.queueCount )
|
|
&& ( timestampValidBits == rhs.timestampValidBits )
|
|
&& ( minImageTransferGranularity == rhs.minImageTransferGranularity );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( QueueFamilyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::QueueFlags queueFlags = {};
    uint32_t queueCount = {};
    uint32_t timestampValidBits = {};
    VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyProperties>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyProperties>::value, "QueueFamilyProperties is not nothrow_move_constructible!" );
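
  //=== Usage sketch (editorial note, not generated from the registry) ===
  // QueueFamilyProperties is what vkGetPhysicalDeviceQueueFamilyProperties reports per queue
  // family. A minimal sketch for picking a graphics-capable family, assuming the default "vk"
  // namespace and a vk::PhysicalDevice named "physicalDevice":
  //
  //   std::vector<vk::QueueFamilyProperties> families = physicalDevice.getQueueFamilyProperties();
  //   uint32_t graphicsFamily = static_cast<uint32_t>( families.size() );  // "not found" sentinel
  //   for ( uint32_t i = 0; i < families.size(); ++i )
  //   {
  //     if ( ( families[i].queueFlags & vk::QueueFlagBits::eGraphics ) && ( families[i].queueCount > 0 ) )
  //     {
  //       graphicsFamily = i;
  //       break;
  //     }
  //   }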
|
|
|
|
struct QueueFamilyProperties2
|
|
{
|
|
using NativeType = VkQueueFamilyProperties2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyProperties2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR QueueFamilyProperties2(VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: queueFamilyProperties( queueFamilyProperties_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
QueueFamilyProperties2( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: QueueFamilyProperties2( *reinterpret_cast<QueueFamilyProperties2 const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
QueueFamilyProperties2 & operator=( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
QueueFamilyProperties2 & operator=( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkQueueFamilyProperties2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkQueueFamilyProperties2*>( this );
|
|
}
|
|
|
|
explicit operator VkQueueFamilyProperties2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkQueueFamilyProperties2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::QueueFamilyProperties const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, queueFamilyProperties );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( QueueFamilyProperties2 const & ) const = default;
|
|
#else
|
|
bool operator==( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( queueFamilyProperties == rhs.queueFamilyProperties );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyProperties2;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value, "QueueFamilyProperties2 is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eQueueFamilyProperties2>
  {
    using Type = QueueFamilyProperties2;
  };
  using QueueFamilyProperties2KHR = QueueFamilyProperties2;
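
  //=== Usage sketch (editorial note, not generated from the registry) ===
  // QueueFamilyProperties2 wraps QueueFamilyProperties and carries the pNext chain used by the
  // extension structs declared nearby; without a chain it behaves like the non-"2" query. A
  // minimal sketch, assuming the default "vk" namespace and a vk::PhysicalDevice "physicalDevice":
  //
  //   std::vector<vk::QueueFamilyProperties2> props2 = physicalDevice.getQueueFamilyProperties2();
  //   for ( auto const & p : props2 )
  //   {
  //     uint32_t count = p.queueFamilyProperties.queueCount;  // same data as QueueFamilyProperties
  //   }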
|
|
|
|
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct QueueFamilyQueryResultStatusProperties2KHR
|
|
{
|
|
using NativeType = VkQueueFamilyQueryResultStatusProperties2KHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyQueryResultStatusProperties2KHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR QueueFamilyQueryResultStatusProperties2KHR(VULKAN_HPP_NAMESPACE::Bool32 supported_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: supported( supported_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR QueueFamilyQueryResultStatusProperties2KHR( QueueFamilyQueryResultStatusProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
QueueFamilyQueryResultStatusProperties2KHR( VkQueueFamilyQueryResultStatusProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: QueueFamilyQueryResultStatusProperties2KHR( *reinterpret_cast<QueueFamilyQueryResultStatusProperties2KHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
QueueFamilyQueryResultStatusProperties2KHR & operator=( QueueFamilyQueryResultStatusProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
QueueFamilyQueryResultStatusProperties2KHR & operator=( VkQueueFamilyQueryResultStatusProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusProperties2KHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 QueueFamilyQueryResultStatusProperties2KHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 QueueFamilyQueryResultStatusProperties2KHR & setSupported( VULKAN_HPP_NAMESPACE::Bool32 supported_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
supported = supported_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkQueueFamilyQueryResultStatusProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkQueueFamilyQueryResultStatusProperties2KHR*>( this );
|
|
}
|
|
|
|
explicit operator VkQueueFamilyQueryResultStatusProperties2KHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkQueueFamilyQueryResultStatusProperties2KHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, supported );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( QueueFamilyQueryResultStatusProperties2KHR const & ) const = default;
|
|
#else
|
|
bool operator==( QueueFamilyQueryResultStatusProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( supported == rhs.supported );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( QueueFamilyQueryResultStatusProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyQueryResultStatusProperties2KHR;
    void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 supported = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusProperties2KHR ) == sizeof( VkQueueFamilyQueryResultStatusProperties2KHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusProperties2KHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusProperties2KHR>::value, "QueueFamilyQueryResultStatusProperties2KHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eQueueFamilyQueryResultStatusProperties2KHR>
  {
    using Type = QueueFamilyQueryResultStatusProperties2KHR;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
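
  //=== Usage sketch (editorial note, not generated from the registry) ===
  // QueueFamilyQueryResultStatusProperties2KHR (beta video extensions, only compiled when
  // VK_ENABLE_BETA_EXTENSIONS is defined) is another pNext-chain output of QueueFamilyProperties2;
  // its "supported" member reports whether result-status queries work on that queue family. It is
  // read back exactly like the checkpoint and global-priority examples above, e.g. through
  //   vk::StructureChain<vk::QueueFamilyProperties2, vk::QueueFamilyQueryResultStatusProperties2KHR>.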
|
|
|
|
struct RayTracingShaderGroupCreateInfoKHR
|
|
{
|
|
using NativeType = VkRayTracingShaderGroupCreateInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR(VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, uint32_t generalShader_ = {}, uint32_t closestHitShader_ = {}, uint32_t anyHitShader_ = {}, uint32_t intersectionShader_ = {}, const void * pShaderGroupCaptureReplayHandle_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: type( type_ ), generalShader( generalShader_ ), closestHitShader( closestHitShader_ ), anyHitShader( anyHitShader_ ), intersectionShader( intersectionShader_ ), pShaderGroupCaptureReplayHandle( pShaderGroupCaptureReplayHandle_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RayTracingShaderGroupCreateInfoKHR( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RayTracingShaderGroupCreateInfoKHR( *reinterpret_cast<RayTracingShaderGroupCreateInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RayTracingShaderGroupCreateInfoKHR & operator=( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RayTracingShaderGroupCreateInfoKHR & operator=( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
type = type_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
generalShader = generalShader_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
closestHitShader = closestHitShader_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
anyHitShader = anyHitShader_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
intersectionShader = intersectionShader_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setPShaderGroupCaptureReplayHandle( const void * pShaderGroupCaptureReplayHandle_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pShaderGroupCaptureReplayHandle = pShaderGroupCaptureReplayHandle_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkRayTracingShaderGroupCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRayTracingShaderGroupCreateInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkRayTracingShaderGroupCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRayTracingShaderGroupCreateInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, const void * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, type, generalShader, closestHitShader, anyHitShader, intersectionShader, pShaderGroupCaptureReplayHandle );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( RayTracingShaderGroupCreateInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( RayTracingShaderGroupCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( type == rhs.type )
|
|
&& ( generalShader == rhs.generalShader )
|
|
&& ( closestHitShader == rhs.closestHitShader )
|
|
&& ( anyHitShader == rhs.anyHitShader )
|
|
&& ( intersectionShader == rhs.intersectionShader )
|
|
&& ( pShaderGroupCaptureReplayHandle == rhs.pShaderGroupCaptureReplayHandle );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( RayTracingShaderGroupCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoKHR;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral;
    uint32_t generalShader = {};
    uint32_t closestHitShader = {};
    uint32_t anyHitShader = {};
    uint32_t intersectionShader = {};
    const void * pShaderGroupCaptureReplayHandle = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR ) == sizeof( VkRayTracingShaderGroupCreateInfoKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR>::value, "RayTracingShaderGroupCreateInfoKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eRayTracingShaderGroupCreateInfoKHR>
  {
    using Type = RayTracingShaderGroupCreateInfoKHR;
  };
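
  //=== Usage sketch (editorial note, not generated from the registry) ===
  // Each shader group names indices into the pipeline's pStages array; unused slots take
  // VK_SHADER_UNUSED_KHR. A minimal sketch, assuming the default "vk" namespace and that stage 0
  // is a raygen shader and stage 1 a closest-hit shader in that array:
  //
  //   vk::RayTracingShaderGroupCreateInfoKHR raygenGroup = vk::RayTracingShaderGroupCreateInfoKHR{}
  //     .setType( vk::RayTracingShaderGroupTypeKHR::eGeneral )
  //     .setGeneralShader( 0 )
  //     .setClosestHitShader( VK_SHADER_UNUSED_KHR )
  //     .setAnyHitShader( VK_SHADER_UNUSED_KHR )
  //     .setIntersectionShader( VK_SHADER_UNUSED_KHR );
  //
  //   vk::RayTracingShaderGroupCreateInfoKHR hitGroup = vk::RayTracingShaderGroupCreateInfoKHR{}
  //     .setType( vk::RayTracingShaderGroupTypeKHR::eTrianglesHitGroup )
  //     .setGeneralShader( VK_SHADER_UNUSED_KHR )
  //     .setClosestHitShader( 1 )
  //     .setAnyHitShader( VK_SHADER_UNUSED_KHR )
  //     .setIntersectionShader( VK_SHADER_UNUSED_KHR );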
|
|
|
|
struct RayTracingPipelineInterfaceCreateInfoKHR
|
|
{
|
|
using NativeType = VkRayTracingPipelineInterfaceCreateInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR(uint32_t maxPipelineRayPayloadSize_ = {}, uint32_t maxPipelineRayHitAttributeSize_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: maxPipelineRayPayloadSize( maxPipelineRayPayloadSize_ ), maxPipelineRayHitAttributeSize( maxPipelineRayHitAttributeSize_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RayTracingPipelineInterfaceCreateInfoKHR( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RayTracingPipelineInterfaceCreateInfoKHR( *reinterpret_cast<RayTracingPipelineInterfaceCreateInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RayTracingPipelineInterfaceCreateInfoKHR & operator=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RayTracingPipelineInterfaceCreateInfoKHR & operator=( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & setMaxPipelineRayPayloadSize( uint32_t maxPipelineRayPayloadSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxPipelineRayPayloadSize = maxPipelineRayPayloadSize_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & setMaxPipelineRayHitAttributeSize( uint32_t maxPipelineRayHitAttributeSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxPipelineRayHitAttributeSize = maxPipelineRayHitAttributeSize_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkRayTracingPipelineInterfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRayTracingPipelineInterfaceCreateInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkRayTracingPipelineInterfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRayTracingPipelineInterfaceCreateInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maxPipelineRayPayloadSize, maxPipelineRayHitAttributeSize );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( RayTracingPipelineInterfaceCreateInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( maxPipelineRayPayloadSize == rhs.maxPipelineRayPayloadSize )
|
|
&& ( maxPipelineRayHitAttributeSize == rhs.maxPipelineRayHitAttributeSize );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR;
    const void * pNext = {};
    uint32_t maxPipelineRayPayloadSize = {};
    uint32_t maxPipelineRayHitAttributeSize = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR ) == sizeof( VkRayTracingPipelineInterfaceCreateInfoKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR>::value, "RayTracingPipelineInterfaceCreateInfoKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eRayTracingPipelineInterfaceCreateInfoKHR>
  {
    using Type = RayTracingPipelineInterfaceCreateInfoKHR;
  };
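
  //=== Usage sketch (editorial note, not generated from the registry) ===
  // The interface create info is only needed when building ray tracing pipeline libraries: it
  // declares the maximum ray payload and hit-attribute sizes (in bytes) that linked libraries
  // must agree on. A minimal sketch, assuming the default "vk" namespace and payload/attribute
  // structs of 16 and 8 bytes in the shaders:
  //
  //   vk::RayTracingPipelineInterfaceCreateInfoKHR interfaceInfo = vk::RayTracingPipelineInterfaceCreateInfoKHR{}
  //     .setMaxPipelineRayPayloadSize( 16 )
  //     .setMaxPipelineRayHitAttributeSize( 8 );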
|
|
|
|
struct RayTracingPipelineCreateInfoKHR
|
|
{
|
|
using NativeType = VkRayTracingPipelineCreateInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, uint32_t groupCount_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups_ = {}, uint32_t maxPipelineRayRecursionDepth_ = {}, const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), stageCount( stageCount_ ), pStages( pStages_ ), groupCount( groupCount_ ), pGroups( pGroups_ ), maxPipelineRayRecursionDepth( maxPipelineRayRecursionDepth_ ), pLibraryInfo( pLibraryInfo_ ), pLibraryInterface( pLibraryInterface_ ), pDynamicState( pDynamicState_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RayTracingPipelineCreateInfoKHR( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RayTracingPipelineCreateInfoKHR( *reinterpret_cast<RayTracingPipelineCreateInfoKHR const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RayTracingPipelineCreateInfoKHR( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR> const & groups_ = {}, uint32_t maxPipelineRayRecursionDepth_ = {}, const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {} )
|
|
: flags( flags_ ), stageCount( static_cast<uint32_t>( stages_.size() ) ), pStages( stages_.data() ), groupCount( static_cast<uint32_t>( groups_.size() ) ), pGroups( groups_.data() ), maxPipelineRayRecursionDepth( maxPipelineRayRecursionDepth_ ), pLibraryInfo( pLibraryInfo_ ), pLibraryInterface( pLibraryInterface_ ), pDynamicState( pDynamicState_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RayTracingPipelineCreateInfoKHR & operator=( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RayTracingPipelineCreateInfoKHR & operator=( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stageCount = stageCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pStages = pStages_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RayTracingPipelineCreateInfoKHR & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stageCount = static_cast<uint32_t>( stages_.size() );
|
|
pStages = stages_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
groupCount = groupCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pGroups = pGroups_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RayTracingPipelineCreateInfoKHR & setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR> const & groups_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
groupCount = static_cast<uint32_t>( groups_.size() );
|
|
pGroups = groups_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setMaxPipelineRayRecursionDepth( uint32_t maxPipelineRayRecursionDepth_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxPipelineRayRecursionDepth = maxPipelineRayRecursionDepth_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPLibraryInfo( const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pLibraryInfo = pLibraryInfo_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPLibraryInterface( const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pLibraryInterface = pLibraryInterface_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPDynamicState( const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDynamicState = pDynamicState_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
layout = layout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
basePipelineHandle = basePipelineHandle_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
basePipelineIndex = basePipelineIndex_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkRayTracingPipelineCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkRayTracingPipelineCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRayTracingPipelineCreateInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * const &, const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * const &, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * const &, VULKAN_HPP_NAMESPACE::PipelineLayout const &, VULKAN_HPP_NAMESPACE::Pipeline const &, int32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, stageCount, pStages, groupCount, pGroups, maxPipelineRayRecursionDepth, pLibraryInfo, pLibraryInterface, pDynamicState, layout, basePipelineHandle, basePipelineIndex );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( RayTracingPipelineCreateInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( RayTracingPipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( stageCount == rhs.stageCount )
|
|
&& ( pStages == rhs.pStages )
|
|
&& ( groupCount == rhs.groupCount )
|
|
&& ( pGroups == rhs.pGroups )
|
|
&& ( maxPipelineRayRecursionDepth == rhs.maxPipelineRayRecursionDepth )
|
|
&& ( pLibraryInfo == rhs.pLibraryInfo )
|
|
&& ( pLibraryInterface == rhs.pLibraryInterface )
|
|
&& ( pDynamicState == rhs.pDynamicState )
|
|
&& ( layout == rhs.layout )
|
|
&& ( basePipelineHandle == rhs.basePipelineHandle )
|
|
&& ( basePipelineIndex == rhs.basePipelineIndex );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( RayTracingPipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoKHR;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
    uint32_t stageCount = {};
    const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {};
    uint32_t groupCount = {};
    const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups = {};
    uint32_t maxPipelineRayRecursionDepth = {};
    const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo = {};
    const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface = {};
    const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState = {};
    VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
    VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
    int32_t basePipelineIndex = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR ) == sizeof( VkRayTracingPipelineCreateInfoKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR>::value, "RayTracingPipelineCreateInfoKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eRayTracingPipelineCreateInfoKHR>
  {
    using Type = RayTracingPipelineCreateInfoKHR;
  };
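
  //=== Usage sketch (editorial note, not generated from the registry) ===
  // Putting the pieces together with the enhanced-mode ArrayProxy constructor, which fills
  // stageCount/pStages and groupCount/pGroups from the passed containers. A minimal sketch,
  // assuming the default "vk" namespace, exceptions enabled, a vk::Device "device" with
  // VK_KHR_ray_tracing_pipeline enabled, std::vector<vk::PipelineShaderStageCreateInfo> "stages",
  // std::vector<vk::RayTracingShaderGroupCreateInfoKHR> "groups" (e.g. the raygenGroup / hitGroup
  // built above), and a vk::PipelineLayout "layout":
  //
  //   vk::RayTracingPipelineCreateInfoKHR createInfo( {}, stages, groups, 1 /*maxPipelineRayRecursionDepth*/ );
  //   createInfo.setLayout( layout );
  //   vk::Pipeline pipeline =
  //     device.createRayTracingPipelineKHR( nullptr /*deferredOperation*/, nullptr /*pipelineCache*/, createInfo ).value;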
|
|
|
|
struct RayTracingShaderGroupCreateInfoNV
|
|
{
|
|
using NativeType = VkRayTracingShaderGroupCreateInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV(VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, uint32_t generalShader_ = {}, uint32_t closestHitShader_ = {}, uint32_t anyHitShader_ = {}, uint32_t intersectionShader_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: type( type_ ), generalShader( generalShader_ ), closestHitShader( closestHitShader_ ), anyHitShader( anyHitShader_ ), intersectionShader( intersectionShader_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RayTracingShaderGroupCreateInfoNV( *reinterpret_cast<RayTracingShaderGroupCreateInfoNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RayTracingShaderGroupCreateInfoNV & operator=( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RayTracingShaderGroupCreateInfoNV & operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
type = type_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
generalShader = generalShader_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
closestHitShader = closestHitShader_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
anyHitShader = anyHitShader_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
intersectionShader = intersectionShader_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkRayTracingShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRayTracingShaderGroupCreateInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkRayTracingShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRayTracingShaderGroupCreateInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, type, generalShader, closestHitShader, anyHitShader, intersectionShader );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( RayTracingShaderGroupCreateInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( RayTracingShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( type == rhs.type )
|
|
&& ( generalShader == rhs.generalShader )
|
|
&& ( closestHitShader == rhs.closestHitShader )
|
|
&& ( anyHitShader == rhs.anyHitShader )
|
|
&& ( intersectionShader == rhs.intersectionShader );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( RayTracingShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral;
    uint32_t generalShader = {};
    uint32_t closestHitShader = {};
    uint32_t anyHitShader = {};
    uint32_t intersectionShader = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV ) == sizeof( VkRayTracingShaderGroupCreateInfoNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV>::value, "RayTracingShaderGroupCreateInfoNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eRayTracingShaderGroupCreateInfoNV>
  {
    using Type = RayTracingShaderGroupCreateInfoNV;
  };
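
  //=== Usage sketch (editorial note, not generated from the registry) ===
  // The NV shader group mirrors the KHR one but has no capture/replay handle and conventionally
  // uses VK_SHADER_UNUSED_NV for unused slots. A minimal sketch, assuming the default "vk"
  // namespace and stage 0 being a raygen shader in the pipeline's stages array:
  //
  //   vk::RayTracingShaderGroupCreateInfoNV raygenGroupNV = vk::RayTracingShaderGroupCreateInfoNV{}
  //     .setType( vk::RayTracingShaderGroupTypeKHR::eGeneral )
  //     .setGeneralShader( 0 )
  //     .setClosestHitShader( VK_SHADER_UNUSED_NV )
  //     .setAnyHitShader( VK_SHADER_UNUSED_NV )
  //     .setIntersectionShader( VK_SHADER_UNUSED_NV );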
|
|
|
|
struct RayTracingPipelineCreateInfoNV
|
|
{
|
|
using NativeType = VkRayTracingPipelineCreateInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, uint32_t groupCount_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_ = {}, uint32_t maxRecursionDepth_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), stageCount( stageCount_ ), pStages( pStages_ ), groupCount( groupCount_ ), pGroups( pGroups_ ), maxRecursionDepth( maxRecursionDepth_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RayTracingPipelineCreateInfoNV( *reinterpret_cast<RayTracingPipelineCreateInfoNV const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RayTracingPipelineCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV> const & groups_ = {}, uint32_t maxRecursionDepth_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {} )
|
|
: flags( flags_ ), stageCount( static_cast<uint32_t>( stages_.size() ) ), pStages( stages_.data() ), groupCount( static_cast<uint32_t>( groups_.size() ) ), pGroups( groups_.data() ), maxRecursionDepth( maxRecursionDepth_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RayTracingPipelineCreateInfoNV & operator=( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RayTracingPipelineCreateInfoNV & operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stageCount = stageCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pStages = pStages_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RayTracingPipelineCreateInfoNV & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stageCount = static_cast<uint32_t>( stages_.size() );
|
|
pStages = stages_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
groupCount = groupCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pGroups = pGroups_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RayTracingPipelineCreateInfoNV & setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV> const & groups_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
groupCount = static_cast<uint32_t>( groups_.size() );
|
|
pGroups = groups_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxRecursionDepth = maxRecursionDepth_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
layout = layout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
basePipelineHandle = basePipelineHandle_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
basePipelineIndex = basePipelineIndex_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkRayTracingPipelineCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkRayTracingPipelineCreateInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRayTracingPipelineCreateInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::PipelineLayout const &, VULKAN_HPP_NAMESPACE::Pipeline const &, int32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, stageCount, pStages, groupCount, pGroups, maxRecursionDepth, layout, basePipelineHandle, basePipelineIndex );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( RayTracingPipelineCreateInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( RayTracingPipelineCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( stageCount == rhs.stageCount )
|
|
&& ( pStages == rhs.pStages )
|
|
&& ( groupCount == rhs.groupCount )
|
|
&& ( pGroups == rhs.pGroups )
|
|
&& ( maxRecursionDepth == rhs.maxRecursionDepth )
|
|
&& ( layout == rhs.layout )
|
|
&& ( basePipelineHandle == rhs.basePipelineHandle )
|
|
&& ( basePipelineIndex == rhs.basePipelineIndex );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( RayTracingPipelineCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
|
|
uint32_t stageCount = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {};
|
|
uint32_t groupCount = {};
|
|
const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups = {};
|
|
uint32_t maxRecursionDepth = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
|
|
VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
|
|
int32_t basePipelineIndex = {};
|
|
|
|
};

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV ) == sizeof( VkRayTracingPipelineCreateInfoNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV>::value, "RayTracingPipelineCreateInfoNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eRayTracingPipelineCreateInfoNV>
  {
    using Type = RayTracingPipelineCreateInfoNV;
  };
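
  // Illustrative usage sketch (not part of the generated API): one way to fill a
  // RayTracingPipelineCreateInfoNV via the ArrayProxy-based constructor defined above.
  // The stages, groups, pipelineLayout, pipelineCache, and device values are
  // hypothetical placeholders supplied by the application.
  //
  //   std::vector<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo>     stages = { /* raygen, miss, hit stages */ };
  //   std::vector<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV> groups = { /* one group per stage */ };
  //   VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV createInfo( {}, stages, groups, /*maxRecursionDepth*/ 1, pipelineLayout );
  //   auto rtPipeline = device.createRayTracingPipelineNV( pipelineCache, createInfo );  // return type depends on the configured error handling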
struct RefreshCycleDurationGOOGLE
|
|
{
|
|
using NativeType = VkRefreshCycleDurationGOOGLE;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE(uint64_t refreshDuration_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: refreshDuration( refreshDuration_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RefreshCycleDurationGOOGLE( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RefreshCycleDurationGOOGLE( *reinterpret_cast<RefreshCycleDurationGOOGLE const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RefreshCycleDurationGOOGLE & operator=( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RefreshCycleDurationGOOGLE & operator=( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkRefreshCycleDurationGOOGLE const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRefreshCycleDurationGOOGLE*>( this );
|
|
}
|
|
|
|
explicit operator VkRefreshCycleDurationGOOGLE &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRefreshCycleDurationGOOGLE*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint64_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( refreshDuration );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( RefreshCycleDurationGOOGLE const & ) const = default;
|
|
#else
|
|
bool operator==( RefreshCycleDurationGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( refreshDuration == rhs.refreshDuration );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( RefreshCycleDurationGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint64_t refreshDuration = {};
|
|
|
|
};

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE ) == sizeof( VkRefreshCycleDurationGOOGLE ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::value, "RefreshCycleDurationGOOGLE is not nothrow_move_constructible!" );
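
  // Illustrative usage sketch (not part of the generated API): with the
  // VK_GOOGLE_display_timing extension enabled, the swapchain's refresh cycle can be
  // queried and the duration read back in nanoseconds; device and swapchain are
  // hypothetical handles, and the call assumes the default (exception-throwing) mode.
  //
  //   VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE refreshCycle = device.getRefreshCycleDurationGOOGLE( swapchain );
  //   uint64_t nanosecondsPerRefresh = refreshCycle.refreshDuration;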
struct RenderPassAttachmentBeginInfo
|
|
{
|
|
using NativeType = VkRenderPassAttachmentBeginInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassAttachmentBeginInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo(uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderPassAttachmentBeginInfo( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RenderPassAttachmentBeginInfo( *reinterpret_cast<RenderPassAttachmentBeginInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassAttachmentBeginInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ )
|
|
: attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RenderPassAttachmentBeginInfo & operator=( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderPassAttachmentBeginInfo & operator=( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentCount = attachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAttachments = pAttachments_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassAttachmentBeginInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentCount = static_cast<uint32_t>( attachments_.size() );
|
|
pAttachments = attachments_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkRenderPassAttachmentBeginInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRenderPassAttachmentBeginInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkRenderPassAttachmentBeginInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRenderPassAttachmentBeginInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ImageView * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, attachmentCount, pAttachments );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( RenderPassAttachmentBeginInfo const & ) const = default;
|
|
#else
|
|
bool operator==( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( attachmentCount == rhs.attachmentCount )
|
|
&& ( pAttachments == rhs.pAttachments );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassAttachmentBeginInfo;
|
|
const void * pNext = {};
|
|
uint32_t attachmentCount = {};
|
|
const VULKAN_HPP_NAMESPACE::ImageView * pAttachments = {};
|
|
|
|
};

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo ) == sizeof( VkRenderPassAttachmentBeginInfo ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo>::value, "RenderPassAttachmentBeginInfo is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eRenderPassAttachmentBeginInfo>
  {
    using Type = RenderPassAttachmentBeginInfo;
  };
  using RenderPassAttachmentBeginInfoKHR = RenderPassAttachmentBeginInfo;
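
  // Illustrative usage sketch (not part of the generated API): for a framebuffer created
  // with FramebufferCreateFlagBits::eImageless, the actual image views are provided at
  // render pass begin time by chaining a RenderPassAttachmentBeginInfo behind the
  // RenderPassBeginInfo. The handles, renderArea, and clearValues below are hypothetical.
  //
  //   std::array<VULKAN_HPP_NAMESPACE::ImageView, 2> attachments = { colorView, depthView };
  //   VULKAN_HPP_NAMESPACE::StructureChain<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo,
  //                                        VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo>
  //     chain( { renderPass, framebuffer, renderArea, clearValues }, { attachments } );
  //   commandBuffer.beginRenderPass( chain.get<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo>(),
  //                                  VULKAN_HPP_NAMESPACE::SubpassContents::eInline );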
struct RenderPassBeginInfo
|
|
{
|
|
using NativeType = VkRenderPassBeginInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassBeginInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo(VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, uint32_t clearValueCount_ = {}, const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: renderPass( renderPass_ ), framebuffer( framebuffer_ ), renderArea( renderArea_ ), clearValueCount( clearValueCount_ ), pClearValues( pClearValues_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RenderPassBeginInfo( *reinterpret_cast<RenderPassBeginInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_, VULKAN_HPP_NAMESPACE::Rect2D renderArea_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ )
|
|
: renderPass( renderPass_ ), framebuffer( framebuffer_ ), renderArea( renderArea_ ), clearValueCount( static_cast<uint32_t>( clearValues_.size() ) ), pClearValues( clearValues_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RenderPassBeginInfo & operator=( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderPassBeginInfo & operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
renderPass = renderPass_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
framebuffer = framebuffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
renderArea = renderArea_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setClearValueCount( uint32_t clearValueCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
clearValueCount = clearValueCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pClearValues = pClearValues_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassBeginInfo & setClearValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
clearValueCount = static_cast<uint32_t>( clearValues_.size() );
|
|
pClearValues = clearValues_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRenderPassBeginInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRenderPassBeginInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RenderPass const &, VULKAN_HPP_NAMESPACE::Framebuffer const &, VULKAN_HPP_NAMESPACE::Rect2D const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ClearValue * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, renderPass, framebuffer, renderArea, clearValueCount, pClearValues );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( RenderPassBeginInfo const & ) const = default;
|
|
#else
|
|
bool operator==( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( renderPass == rhs.renderPass )
|
|
&& ( framebuffer == rhs.framebuffer )
|
|
&& ( renderArea == rhs.renderArea )
|
|
&& ( clearValueCount == rhs.clearValueCount )
|
|
&& ( pClearValues == rhs.pClearValues );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassBeginInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
|
|
VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {};
|
|
VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
|
|
uint32_t clearValueCount = {};
|
|
const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues = {};
|
|
|
|
};

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo>::value, "RenderPassBeginInfo is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eRenderPassBeginInfo>
  {
    using Type = RenderPassBeginInfo;
  };
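
  // Illustrative usage sketch (not part of the generated API): filling a RenderPassBeginInfo
  // with the ArrayProxy-based constructor defined above and recording it into a command
  // buffer. The renderPass, framebuffer, extent, and commandBuffer values are hypothetical.
  //
  //   std::array<VULKAN_HPP_NAMESPACE::ClearValue, 2> clearValues;
  //   clearValues[0].color        = VULKAN_HPP_NAMESPACE::ClearColorValue( std::array<float, 4>{ { 0.2f, 0.2f, 0.2f, 1.0f } } );
  //   clearValues[1].depthStencil = VULKAN_HPP_NAMESPACE::ClearDepthStencilValue( 1.0f, 0 );
  //   VULKAN_HPP_NAMESPACE::RenderPassBeginInfo renderPassBeginInfo(
  //     renderPass, framebuffer, VULKAN_HPP_NAMESPACE::Rect2D( VULKAN_HPP_NAMESPACE::Offset2D( 0, 0 ), extent ), clearValues );
  //   commandBuffer.beginRenderPass( renderPassBeginInfo, VULKAN_HPP_NAMESPACE::SubpassContents::eInline );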
struct SubpassDescription
|
|
{
|
|
using NativeType = VkSubpassDescription;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SubpassDescription(VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, uint32_t inputAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {}, uint32_t preserveAttachmentCount_ = {}, const uint32_t * pPreserveAttachments_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), inputAttachmentCount( inputAttachmentCount_ ), pInputAttachments( pInputAttachments_ ), colorAttachmentCount( colorAttachmentCount_ ), pColorAttachments( pColorAttachments_ ), pResolveAttachments( pResolveAttachments_ ), pDepthStencilAttachment( pDepthStencilAttachment_ ), preserveAttachmentCount( preserveAttachmentCount_ ), pPreserveAttachments( pPreserveAttachments_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SubpassDescription( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassDescription( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SubpassDescription( *reinterpret_cast<SubpassDescription const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubpassDescription( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & inputAttachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & colorAttachments_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & resolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ = {} )
|
|
: flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), inputAttachmentCount( static_cast<uint32_t>( inputAttachments_.size() ) ), pInputAttachments( inputAttachments_.data() ), colorAttachmentCount( static_cast<uint32_t>( colorAttachments_.size() ) ), pColorAttachments( colorAttachments_.data() ), pResolveAttachments( resolveAttachments_.data() ), pDepthStencilAttachment( pDepthStencilAttachment_ ), preserveAttachmentCount( static_cast<uint32_t>( preserveAttachments_.size() ) ), pPreserveAttachments( preserveAttachments_.data() )
|
|
{
|
|
#ifdef VULKAN_HPP_NO_EXCEPTIONS
|
|
VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) );
|
|
#else
|
|
if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) )
|
|
{
|
|
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::SubpassDescription::SubpassDescription: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" );
|
|
}
|
|
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SubpassDescription & operator=( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassDescription & operator=( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescription const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineBindPoint = pipelineBindPoint_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
inputAttachmentCount = inputAttachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pInputAttachments = pInputAttachments_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubpassDescription & setInputAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
inputAttachmentCount = static_cast<uint32_t>( inputAttachments_.size() );
|
|
pInputAttachments = inputAttachments_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorAttachmentCount = colorAttachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pColorAttachments = pColorAttachments_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubpassDescription & setColorAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
|
|
pColorAttachments = colorAttachments_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pResolveAttachments = pResolveAttachments_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubpassDescription & setResolveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorAttachmentCount = static_cast<uint32_t>( resolveAttachments_.size() );
|
|
pResolveAttachments = resolveAttachments_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDepthStencilAttachment = pDepthStencilAttachment_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
preserveAttachmentCount = preserveAttachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pPreserveAttachments = pPreserveAttachments_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubpassDescription & setPreserveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
preserveAttachmentCount = static_cast<uint32_t>( preserveAttachments_.size() );
|
|
pPreserveAttachments = preserveAttachments_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSubpassDescription const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSubpassDescription*>( this );
|
|
}
|
|
|
|
explicit operator VkSubpassDescription &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSubpassDescription*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags const &, VULKAN_HPP_NAMESPACE::PipelineBindPoint const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AttachmentReference * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AttachmentReference * const &, const VULKAN_HPP_NAMESPACE::AttachmentReference * const &, const VULKAN_HPP_NAMESPACE::AttachmentReference * const &, uint32_t const &, const uint32_t * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( flags, pipelineBindPoint, inputAttachmentCount, pInputAttachments, colorAttachmentCount, pColorAttachments, pResolveAttachments, pDepthStencilAttachment, preserveAttachmentCount, pPreserveAttachments );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SubpassDescription const & ) const = default;
|
|
#else
|
|
bool operator==( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( flags == rhs.flags )
|
|
&& ( pipelineBindPoint == rhs.pipelineBindPoint )
|
|
&& ( inputAttachmentCount == rhs.inputAttachmentCount )
|
|
&& ( pInputAttachments == rhs.pInputAttachments )
|
|
&& ( colorAttachmentCount == rhs.colorAttachmentCount )
|
|
&& ( pColorAttachments == rhs.pColorAttachments )
|
|
&& ( pResolveAttachments == rhs.pResolveAttachments )
|
|
&& ( pDepthStencilAttachment == rhs.pDepthStencilAttachment )
|
|
&& ( preserveAttachmentCount == rhs.preserveAttachmentCount )
|
|
&& ( pPreserveAttachments == rhs.pPreserveAttachments );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
|
|
uint32_t inputAttachmentCount = {};
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments = {};
|
|
uint32_t colorAttachmentCount = {};
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments = {};
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments = {};
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment = {};
|
|
uint32_t preserveAttachmentCount = {};
|
|
const uint32_t * pPreserveAttachments = {};
|
|
|
|
};

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDescription ) == sizeof( VkSubpassDescription ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDescription>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDescription>::value, "SubpassDescription is not nothrow_move_constructible!" );
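
  // Illustrative usage sketch (not part of the generated API): describing one graphics
  // subpass with a single color attachment and a depth/stencil attachment, using the
  // ArrayProxy-based constructor defined above. The attachment indices refer into the
  // render pass' pAttachments array.
  //
  //   VULKAN_HPP_NAMESPACE::AttachmentReference colorReference( 0, VULKAN_HPP_NAMESPACE::ImageLayout::eColorAttachmentOptimal );
  //   VULKAN_HPP_NAMESPACE::AttachmentReference depthReference( 1, VULKAN_HPP_NAMESPACE::ImageLayout::eDepthStencilAttachmentOptimal );
  //   VULKAN_HPP_NAMESPACE::SubpassDescription subpass(
  //     {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
  //     /*inputAttachments*/ {}, /*colorAttachments*/ colorReference, /*resolveAttachments*/ {}, &depthReference );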
struct SubpassDependency
|
|
{
|
|
using NativeType = VkSubpassDependency;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SubpassDependency(uint32_t srcSubpass_ = {}, uint32_t dstSubpass_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: srcSubpass( srcSubpass_ ), dstSubpass( dstSubpass_ ), srcStageMask( srcStageMask_ ), dstStageMask( dstStageMask_ ), srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), dependencyFlags( dependencyFlags_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SubpassDependency( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassDependency( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SubpassDependency( *reinterpret_cast<SubpassDependency const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SubpassDependency & operator=( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassDependency & operator=( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDependency const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcSubpass = srcSubpass_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstSubpass = dstSubpass_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcStageMask = srcStageMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstStageMask = dstStageMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcAccessMask = srcAccessMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstAccessMask = dstAccessMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dependencyFlags = dependencyFlags_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSubpassDependency const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSubpassDependency*>( this );
|
|
}
|
|
|
|
explicit operator VkSubpassDependency &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSubpassDependency*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, VULKAN_HPP_NAMESPACE::DependencyFlags const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( srcSubpass, dstSubpass, srcStageMask, dstStageMask, srcAccessMask, dstAccessMask, dependencyFlags );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SubpassDependency const & ) const = default;
|
|
#else
|
|
bool operator==( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( srcSubpass == rhs.srcSubpass )
|
|
&& ( dstSubpass == rhs.dstSubpass )
|
|
&& ( srcStageMask == rhs.srcStageMask )
|
|
&& ( dstStageMask == rhs.dstStageMask )
|
|
&& ( srcAccessMask == rhs.srcAccessMask )
|
|
&& ( dstAccessMask == rhs.dstAccessMask )
|
|
&& ( dependencyFlags == rhs.dependencyFlags );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t srcSubpass = {};
|
|
uint32_t dstSubpass = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {};
|
|
VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
|
|
VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
|
|
VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
|
|
|
|
};

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDependency>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDependency>::value, "SubpassDependency is not nothrow_move_constructible!" );
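
  // Illustrative usage sketch (not part of the generated API): a typical external
  // dependency that orders previous color attachment output (e.g. the presentation
  // engine's use of the swapchain image) before the color attachment writes of subpass 0.
  //
  //   VULKAN_HPP_NAMESPACE::SubpassDependency dependency(
  //     VK_SUBPASS_EXTERNAL, 0,
  //     VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eColorAttachmentOutput,
  //     VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eColorAttachmentOutput,
  //     {}, VULKAN_HPP_NAMESPACE::AccessFlagBits::eColorAttachmentWrite );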
struct RenderPassCreateInfo
|
|
{
|
|
using NativeType = VkRenderPassCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR RenderPassCreateInfo(VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ = {}, uint32_t subpassCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ = {}, uint32_t dependencyCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ ), subpassCount( subpassCount_ ), pSubpasses( pSubpasses_ ), dependencyCount( dependencyCount_ ), pDependencies( pDependencies_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RenderPassCreateInfo( *reinterpret_cast<RenderPassCreateInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription> const & attachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription> const & subpasses_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency> const & dependencies_ = {} )
|
|
: flags( flags_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() ), subpassCount( static_cast<uint32_t>( subpasses_.size() ) ), pSubpasses( subpasses_.data() ), dependencyCount( static_cast<uint32_t>( dependencies_.size() ) ), pDependencies( dependencies_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RenderPassCreateInfo & operator=( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderPassCreateInfo & operator=( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentCount = attachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAttachments = pAttachments_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassCreateInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription> const & attachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentCount = static_cast<uint32_t>( attachments_.size() );
|
|
pAttachments = attachments_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subpassCount = subpassCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSubpasses = pSubpasses_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassCreateInfo & setSubpasses( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription> const & subpasses_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subpassCount = static_cast<uint32_t>( subpasses_.size() );
|
|
pSubpasses = subpasses_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dependencyCount = dependencyCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDependencies = pDependencies_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassCreateInfo & setDependencies( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency> const & dependencies_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dependencyCount = static_cast<uint32_t>( dependencies_.size() );
|
|
pDependencies = dependencies_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkRenderPassCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRenderPassCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkRenderPassCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRenderPassCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RenderPassCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AttachmentDescription * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SubpassDescription * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SubpassDependency * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, attachmentCount, pAttachments, subpassCount, pSubpasses, dependencyCount, pDependencies );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( RenderPassCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( attachmentCount == rhs.attachmentCount )
|
|
&& ( pAttachments == rhs.pAttachments )
|
|
&& ( subpassCount == rhs.subpassCount )
|
|
&& ( pSubpasses == rhs.pSubpasses )
|
|
&& ( dependencyCount == rhs.dependencyCount )
|
|
&& ( pDependencies == rhs.pDependencies );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {};
|
|
uint32_t attachmentCount = {};
|
|
const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments = {};
|
|
uint32_t subpassCount = {};
|
|
const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses = {};
|
|
uint32_t dependencyCount = {};
|
|
const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies = {};
|
|
|
|
};

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo>::value, "RenderPassCreateInfo is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eRenderPassCreateInfo>
  {
    using Type = RenderPassCreateInfo;
  };
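
  // Illustrative usage sketch (not part of the generated API): assembling attachment
  // descriptions, a subpass, and a dependency into a RenderPassCreateInfo and creating
  // the render pass. colorAttachment, depthAttachment, subpass, dependency, and device
  // are hypothetical objects (e.g. set up as in the sketches above), and the call
  // assumes the default (exception-throwing) mode.
  //
  //   std::array<VULKAN_HPP_NAMESPACE::AttachmentDescription, 2> attachments = { colorAttachment, depthAttachment };
  //   VULKAN_HPP_NAMESPACE::RenderPassCreateInfo renderPassCreateInfo( {}, attachments, subpass, dependency );
  //   VULKAN_HPP_NAMESPACE::RenderPass renderPass = device.createRenderPass( renderPassCreateInfo );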
struct SubpassDescription2
|
|
{
|
|
using NativeType = VkSubpassDescription2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescription2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SubpassDescription2(VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, uint32_t viewMask_ = {}, uint32_t inputAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {}, uint32_t preserveAttachmentCount_ = {}, const uint32_t * pPreserveAttachments_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), viewMask( viewMask_ ), inputAttachmentCount( inputAttachmentCount_ ), pInputAttachments( pInputAttachments_ ), colorAttachmentCount( colorAttachmentCount_ ), pColorAttachments( pColorAttachments_ ), pResolveAttachments( pResolveAttachments_ ), pDepthStencilAttachment( pDepthStencilAttachment_ ), preserveAttachmentCount( preserveAttachmentCount_ ), pPreserveAttachments( pPreserveAttachments_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SubpassDescription2( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassDescription2( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SubpassDescription2( *reinterpret_cast<SubpassDescription2 const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubpassDescription2( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, uint32_t viewMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & inputAttachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & colorAttachments_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & resolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ = {} )
|
|
: flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), viewMask( viewMask_ ), inputAttachmentCount( static_cast<uint32_t>( inputAttachments_.size() ) ), pInputAttachments( inputAttachments_.data() ), colorAttachmentCount( static_cast<uint32_t>( colorAttachments_.size() ) ), pColorAttachments( colorAttachments_.data() ), pResolveAttachments( resolveAttachments_.data() ), pDepthStencilAttachment( pDepthStencilAttachment_ ), preserveAttachmentCount( static_cast<uint32_t>( preserveAttachments_.size() ) ), pPreserveAttachments( preserveAttachments_.data() )
|
|
{
|
|
#ifdef VULKAN_HPP_NO_EXCEPTIONS
|
|
VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) );
|
|
#else
|
|
if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) )
|
|
{
|
|
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::SubpassDescription2::SubpassDescription2: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" );
|
|
}
|
|
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SubpassDescription2 & operator=( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassDescription2 & operator=( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescription2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pipelineBindPoint = pipelineBindPoint_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewMask = viewMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
inputAttachmentCount = inputAttachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pInputAttachments = pInputAttachments_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubpassDescription2 & setInputAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
inputAttachmentCount = static_cast<uint32_t>( inputAttachments_.size() );
|
|
pInputAttachments = inputAttachments_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorAttachmentCount = colorAttachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pColorAttachments = pColorAttachments_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubpassDescription2 & setColorAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
|
|
pColorAttachments = colorAttachments_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pResolveAttachments = pResolveAttachments_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubpassDescription2 & setResolveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorAttachmentCount = static_cast<uint32_t>( resolveAttachments_.size() );
|
|
pResolveAttachments = resolveAttachments_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDepthStencilAttachment = pDepthStencilAttachment_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
preserveAttachmentCount = preserveAttachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pPreserveAttachments = pPreserveAttachments_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubpassDescription2 & setPreserveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
preserveAttachmentCount = static_cast<uint32_t>( preserveAttachments_.size() );
|
|
pPreserveAttachments = preserveAttachments_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSubpassDescription2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSubpassDescription2*>( this );
|
|
}
|
|
|
|
explicit operator VkSubpassDescription2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSubpassDescription2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags const &, VULKAN_HPP_NAMESPACE::PipelineBindPoint const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &, uint32_t const &, const uint32_t * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, pipelineBindPoint, viewMask, inputAttachmentCount, pInputAttachments, colorAttachmentCount, pColorAttachments, pResolveAttachments, pDepthStencilAttachment, preserveAttachmentCount, pPreserveAttachments );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SubpassDescription2 const & ) const = default;
|
|
#else
|
|
bool operator==( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( pipelineBindPoint == rhs.pipelineBindPoint )
|
|
&& ( viewMask == rhs.viewMask )
|
|
&& ( inputAttachmentCount == rhs.inputAttachmentCount )
|
|
&& ( pInputAttachments == rhs.pInputAttachments )
|
|
&& ( colorAttachmentCount == rhs.colorAttachmentCount )
|
|
&& ( pColorAttachments == rhs.pColorAttachments )
|
|
&& ( pResolveAttachments == rhs.pResolveAttachments )
|
|
&& ( pDepthStencilAttachment == rhs.pDepthStencilAttachment )
|
|
&& ( preserveAttachmentCount == rhs.preserveAttachmentCount )
|
|
&& ( pPreserveAttachments == rhs.pPreserveAttachments );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescription2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
|
|
uint32_t viewMask = {};
|
|
uint32_t inputAttachmentCount = {};
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments = {};
|
|
uint32_t colorAttachmentCount = {};
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments = {};
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments = {};
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment = {};
|
|
uint32_t preserveAttachmentCount = {};
|
|
const uint32_t * pPreserveAttachments = {};
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDescription2 ) == sizeof( VkSubpassDescription2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDescription2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDescription2>::value, "SubpassDescription2 is not nothrow_move_constructible!" );
template <>
|
|
struct CppType<StructureType, StructureType::eSubpassDescription2>
|
|
{
|
|
using Type = SubpassDescription2;
|
|
};
|
|
using SubpassDescription2KHR = SubpassDescription2;
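  // A minimal usage sketch for SubpassDescription2, assuming enhanced mode is enabled and that
  // VULKAN_HPP_NAMESPACE is the default "vk" (all names below are illustrative):
  //
  //   vk::AttachmentReference2 colorRef( 0, vk::ImageLayout::eColorAttachmentOptimal );
  //   vk::SubpassDescription2  subpass  = vk::SubpassDescription2()
  //                                         .setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
  //                                         .setColorAttachments( colorRef );   // fills colorAttachmentCount and pColorAttachments
  //
  // The ArrayProxyNoTemporaries setters only borrow the referenced data, so "colorRef" has to
  // stay alive until the render pass has been created.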
struct SubpassDependency2
|
|
{
|
|
using NativeType = VkSubpassDependency2;
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDependency2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SubpassDependency2(uint32_t srcSubpass_ = {}, uint32_t dstSubpass_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, int32_t viewOffset_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: srcSubpass( srcSubpass_ ), dstSubpass( dstSubpass_ ), srcStageMask( srcStageMask_ ), dstStageMask( dstStageMask_ ), srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), dependencyFlags( dependencyFlags_ ), viewOffset( viewOffset_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SubpassDependency2( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassDependency2( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SubpassDependency2( *reinterpret_cast<SubpassDependency2 const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SubpassDependency2 & operator=( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassDependency2 & operator=( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDependency2 const *>( &rhs );
|
|
return *this;
|
|
}
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcSubpass = srcSubpass_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstSubpass = dstSubpass_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcStageMask = srcStageMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstStageMask = dstStageMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcAccessMask = srcAccessMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstAccessMask = dstAccessMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dependencyFlags = dependencyFlags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setViewOffset( int32_t viewOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewOffset = viewOffset_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
explicit operator VkSubpassDependency2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSubpassDependency2*>( this );
|
|
}
|
|
|
|
explicit operator VkSubpassDependency2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSubpassDependency2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, VULKAN_HPP_NAMESPACE::DependencyFlags const &, int32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcSubpass, dstSubpass, srcStageMask, dstStageMask, srcAccessMask, dstAccessMask, dependencyFlags, viewOffset );
|
|
}
|
|
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SubpassDependency2 const & ) const = default;
|
|
#else
|
|
bool operator==( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( srcSubpass == rhs.srcSubpass )
|
|
&& ( dstSubpass == rhs.dstSubpass )
|
|
&& ( srcStageMask == rhs.srcStageMask )
|
|
&& ( dstStageMask == rhs.dstStageMask )
|
|
&& ( srcAccessMask == rhs.srcAccessMask )
|
|
&& ( dstAccessMask == rhs.dstAccessMask )
|
|
&& ( dependencyFlags == rhs.dependencyFlags )
|
|
&& ( viewOffset == rhs.viewOffset );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDependency2;
|
|
const void * pNext = {};
|
|
uint32_t srcSubpass = {};
|
|
uint32_t dstSubpass = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {};
|
|
VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {};
|
|
VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
|
|
VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
|
|
VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
|
|
int32_t viewOffset = {};
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDependency2 ) == sizeof( VkSubpassDependency2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDependency2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDependency2>::value, "SubpassDependency2 is not nothrow_move_constructible!" );
template <>
|
|
struct CppType<StructureType, StructureType::eSubpassDependency2>
|
|
{
|
|
using Type = SubpassDependency2;
|
|
};
|
|
using SubpassDependency2KHR = SubpassDependency2;
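  // A minimal sketch of an external-to-first-subpass dependency, assuming the default "vk"
  // namespace (the masks shown are just one common choice, not a requirement):
  //
  //   vk::SubpassDependency2 dependency = vk::SubpassDependency2()
  //                                         .setSrcSubpass( VK_SUBPASS_EXTERNAL )
  //                                         .setDstSubpass( 0 )
  //                                         .setSrcStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
  //                                         .setDstStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
  //                                         .setDstAccessMask( vk::AccessFlagBits::eColorAttachmentWrite );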
struct RenderPassCreateInfo2
|
|
{
|
|
using NativeType = VkRenderPassCreateInfo2;
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo2;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2(VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ = {}, uint32_t subpassCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ = {}, uint32_t dependencyCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ = {}, uint32_t correlatedViewMaskCount_ = {}, const uint32_t * pCorrelatedViewMasks_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ ), subpassCount( subpassCount_ ), pSubpasses( pSubpasses_ ), dependencyCount( dependencyCount_ ), pDependencies( pDependencies_ ), correlatedViewMaskCount( correlatedViewMaskCount_ ), pCorrelatedViewMasks( pCorrelatedViewMasks_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderPassCreateInfo2( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RenderPassCreateInfo2( *reinterpret_cast<RenderPassCreateInfo2 const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription2> const & attachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription2> const & subpasses_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency2> const & dependencies_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlatedViewMasks_ = {} )
|
|
: flags( flags_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() ), subpassCount( static_cast<uint32_t>( subpasses_.size() ) ), pSubpasses( subpasses_.data() ), dependencyCount( static_cast<uint32_t>( dependencies_.size() ) ), pDependencies( dependencies_.data() ), correlatedViewMaskCount( static_cast<uint32_t>( correlatedViewMasks_.size() ) ), pCorrelatedViewMasks( correlatedViewMasks_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RenderPassCreateInfo2 & operator=( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderPassCreateInfo2 & operator=( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentCount = attachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAttachments = pAttachments_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassCreateInfo2 & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription2> const & attachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentCount = static_cast<uint32_t>( attachments_.size() );
|
|
pAttachments = attachments_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subpassCount = subpassCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSubpasses = pSubpasses_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassCreateInfo2 & setSubpasses( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription2> const & subpasses_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subpassCount = static_cast<uint32_t>( subpasses_.size() );
|
|
pSubpasses = subpasses_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dependencyCount = dependencyCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDependencies = pDependencies_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassCreateInfo2 & setDependencies( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency2> const & dependencies_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dependencyCount = static_cast<uint32_t>( dependencies_.size() );
|
|
pDependencies = dependencies_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setCorrelatedViewMaskCount( uint32_t correlatedViewMaskCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
correlatedViewMaskCount = correlatedViewMaskCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPCorrelatedViewMasks( const uint32_t * pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pCorrelatedViewMasks = pCorrelatedViewMasks_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassCreateInfo2 & setCorrelatedViewMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
correlatedViewMaskCount = static_cast<uint32_t>( correlatedViewMasks_.size() );
|
|
pCorrelatedViewMasks = correlatedViewMasks_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
explicit operator VkRenderPassCreateInfo2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRenderPassCreateInfo2*>( this );
|
|
}
|
|
|
|
explicit operator VkRenderPassCreateInfo2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRenderPassCreateInfo2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RenderPassCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SubpassDescription2 * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SubpassDependency2 * const &, uint32_t const &, const uint32_t * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, attachmentCount, pAttachments, subpassCount, pSubpasses, dependencyCount, pDependencies, correlatedViewMaskCount, pCorrelatedViewMasks );
|
|
}
|
|
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( RenderPassCreateInfo2 const & ) const = default;
|
|
#else
|
|
bool operator==( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( attachmentCount == rhs.attachmentCount )
|
|
&& ( pAttachments == rhs.pAttachments )
|
|
&& ( subpassCount == rhs.subpassCount )
|
|
&& ( pSubpasses == rhs.pSubpasses )
|
|
&& ( dependencyCount == rhs.dependencyCount )
|
|
&& ( pDependencies == rhs.pDependencies )
|
|
&& ( correlatedViewMaskCount == rhs.correlatedViewMaskCount )
|
|
&& ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {};
|
|
uint32_t attachmentCount = {};
|
|
const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments = {};
|
|
uint32_t subpassCount = {};
|
|
const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses = {};
|
|
uint32_t dependencyCount = {};
|
|
const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies = {};
|
|
uint32_t correlatedViewMaskCount = {};
|
|
const uint32_t * pCorrelatedViewMasks = {};
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 ) == sizeof( VkRenderPassCreateInfo2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2>::value, "RenderPassCreateInfo2 is not nothrow_move_constructible!" );
template <>
|
|
struct CppType<StructureType, StructureType::eRenderPassCreateInfo2>
|
|
{
|
|
using Type = RenderPassCreateInfo2;
|
|
};
|
|
using RenderPassCreateInfo2KHR = RenderPassCreateInfo2;
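  // A minimal sketch of assembling a RenderPassCreateInfo2 with the enhanced-mode constructor,
  // assuming a valid vk::Device "device" and already filled "attachmentDesc", "subpass" and
  // "dependency" objects (the exception-based createRenderPass2 overload is assumed here):
  //
  //   std::array<vk::AttachmentDescription2, 1> attachments  = { attachmentDesc };
  //   std::array<vk::SubpassDescription2, 1>    subpasses    = { subpass };
  //   std::array<vk::SubpassDependency2, 1>     dependencies = { dependency };
  //
  //   vk::RenderPassCreateInfo2 createInfo( {}, attachments, subpasses, dependencies );
  //   vk::RenderPass            renderPass = device.createRenderPass2( createInfo );
  //
  // The ArrayProxyNoTemporaries constructor fills attachmentCount, subpassCount and
  // dependencyCount from the sizes of the passed ranges.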
struct RenderPassFragmentDensityMapCreateInfoEXT
|
|
{
|
|
using NativeType = VkRenderPassFragmentDensityMapCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT(VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: fragmentDensityMapAttachment( fragmentDensityMapAttachment_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RenderPassFragmentDensityMapCreateInfoEXT( *reinterpret_cast<RenderPassFragmentDensityMapCreateInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RenderPassFragmentDensityMapCreateInfoEXT & operator=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderPassFragmentDensityMapCreateInfoEXT & operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT & setFragmentDensityMapAttachment( VULKAN_HPP_NAMESPACE::AttachmentReference const & fragmentDensityMapAttachment_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fragmentDensityMapAttachment = fragmentDensityMapAttachment_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
explicit operator VkRenderPassFragmentDensityMapCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRenderPassFragmentDensityMapCreateInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkRenderPassFragmentDensityMapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRenderPassFragmentDensityMapCreateInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AttachmentReference const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, fragmentDensityMapAttachment );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( RenderPassFragmentDensityMapCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( fragmentDensityMapAttachment == rhs.fragmentDensityMapAttachment );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT ) == sizeof( VkRenderPassFragmentDensityMapCreateInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT>::value, "RenderPassFragmentDensityMapCreateInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eRenderPassFragmentDensityMapCreateInfoEXT>
|
|
{
|
|
using Type = RenderPassFragmentDensityMapCreateInfoEXT;
|
|
};
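  // A minimal sketch of attaching a fragment density map to a render pass, assuming the
  // VK_EXT_fragment_density_map extension is enabled and attachment 1 is the density map
  // (chaining by hand is shown; a vk::StructureChain could be used instead):
  //
  //   vk::RenderPassFragmentDensityMapCreateInfoEXT densityMapInfo(
  //     vk::AttachmentReference( 1, vk::ImageLayout::eFragmentDensityMapOptimalEXT ) );
  //   createInfo.setPNext( &densityMapInfo );   // "createInfo" is the render pass create info from the sketch above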
struct RenderPassInputAttachmentAspectCreateInfo
|
|
{
|
|
using NativeType = VkRenderPassInputAttachmentAspectCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo(uint32_t aspectReferenceCount_ = {}, const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: aspectReferenceCount( aspectReferenceCount_ ), pAspectReferences( pAspectReferences_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RenderPassInputAttachmentAspectCreateInfo( *reinterpret_cast<RenderPassInputAttachmentAspectCreateInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassInputAttachmentAspectCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference> const & aspectReferences_ )
|
|
: aspectReferenceCount( static_cast<uint32_t>( aspectReferences_.size() ) ), pAspectReferences( aspectReferences_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RenderPassInputAttachmentAspectCreateInfo & operator=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderPassInputAttachmentAspectCreateInfo & operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & setAspectReferenceCount( uint32_t aspectReferenceCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
aspectReferenceCount = aspectReferenceCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & setPAspectReferences( const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAspectReferences = pAspectReferences_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassInputAttachmentAspectCreateInfo & setAspectReferences( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference> const & aspectReferences_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
aspectReferenceCount = static_cast<uint32_t>( aspectReferences_.size() );
|
|
pAspectReferences = aspectReferences_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
explicit operator VkRenderPassInputAttachmentAspectCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRenderPassInputAttachmentAspectCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkRenderPassInputAttachmentAspectCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRenderPassInputAttachmentAspectCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, aspectReferenceCount, pAspectReferences );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( RenderPassInputAttachmentAspectCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( aspectReferenceCount == rhs.aspectReferenceCount )
|
|
&& ( pAspectReferences == rhs.pAspectReferences );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
|
|
const void * pNext = {};
|
|
uint32_t aspectReferenceCount = {};
|
|
const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo ) == sizeof( VkRenderPassInputAttachmentAspectCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo>::value, "RenderPassInputAttachmentAspectCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eRenderPassInputAttachmentAspectCreateInfo>
|
|
{
|
|
using Type = RenderPassInputAttachmentAspectCreateInfo;
|
|
};
|
|
using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo;
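  // A minimal sketch, assuming the aspect of input attachment 0 in subpass 0 should be
  // restricted to the color aspect; this structure extends VkRenderPassCreateInfo, and
  // "renderPassCreateInfo" is a vk::RenderPassCreateInfo assumed to be assembled elsewhere:
  //
  //   vk::InputAttachmentAspectReference aspectRef( 0 /*subpass*/, 0 /*inputAttachmentIndex*/, vk::ImageAspectFlagBits::eColor );
  //   vk::RenderPassInputAttachmentAspectCreateInfo aspectInfo( aspectRef );
  //   renderPassCreateInfo.setPNext( &aspectInfo );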
struct RenderPassMultiviewCreateInfo
|
|
{
|
|
using NativeType = VkRenderPassMultiviewCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassMultiviewCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo(uint32_t subpassCount_ = {}, const uint32_t * pViewMasks_ = {}, uint32_t dependencyCount_ = {}, const int32_t * pViewOffsets_ = {}, uint32_t correlationMaskCount_ = {}, const uint32_t * pCorrelationMasks_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: subpassCount( subpassCount_ ), pViewMasks( pViewMasks_ ), dependencyCount( dependencyCount_ ), pViewOffsets( pViewOffsets_ ), correlationMaskCount( correlationMaskCount_ ), pCorrelationMasks( pCorrelationMasks_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RenderPassMultiviewCreateInfo( *reinterpret_cast<RenderPassMultiviewCreateInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassMultiviewCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & viewMasks_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int32_t> const & viewOffsets_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlationMasks_ = {} )
|
|
: subpassCount( static_cast<uint32_t>( viewMasks_.size() ) ), pViewMasks( viewMasks_.data() ), dependencyCount( static_cast<uint32_t>( viewOffsets_.size() ) ), pViewOffsets( viewOffsets_.data() ), correlationMaskCount( static_cast<uint32_t>( correlationMasks_.size() ) ), pCorrelationMasks( correlationMasks_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RenderPassMultiviewCreateInfo & operator=( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderPassMultiviewCreateInfo & operator=( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subpassCount = subpassCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPViewMasks( const uint32_t * pViewMasks_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pViewMasks = pViewMasks_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassMultiviewCreateInfo & setViewMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & viewMasks_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subpassCount = static_cast<uint32_t>( viewMasks_.size() );
|
|
pViewMasks = viewMasks_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dependencyCount = dependencyCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPViewOffsets( const int32_t * pViewOffsets_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pViewOffsets = pViewOffsets_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassMultiviewCreateInfo & setViewOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int32_t> const & viewOffsets_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dependencyCount = static_cast<uint32_t>( viewOffsets_.size() );
|
|
pViewOffsets = viewOffsets_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setCorrelationMaskCount( uint32_t correlationMaskCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
correlationMaskCount = correlationMaskCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPCorrelationMasks( const uint32_t * pCorrelationMasks_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pCorrelationMasks = pCorrelationMasks_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassMultiviewCreateInfo & setCorrelationMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlationMasks_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
correlationMaskCount = static_cast<uint32_t>( correlationMasks_.size() );
|
|
pCorrelationMasks = correlationMasks_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
explicit operator VkRenderPassMultiviewCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRenderPassMultiviewCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkRenderPassMultiviewCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRenderPassMultiviewCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint32_t * const &, uint32_t const &, const int32_t * const &, uint32_t const &, const uint32_t * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, subpassCount, pViewMasks, dependencyCount, pViewOffsets, correlationMaskCount, pCorrelationMasks );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( RenderPassMultiviewCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( subpassCount == rhs.subpassCount )
|
|
&& ( pViewMasks == rhs.pViewMasks )
|
|
&& ( dependencyCount == rhs.dependencyCount )
|
|
&& ( pViewOffsets == rhs.pViewOffsets )
|
|
&& ( correlationMaskCount == rhs.correlationMaskCount )
|
|
&& ( pCorrelationMasks == rhs.pCorrelationMasks );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassMultiviewCreateInfo;
|
|
const void * pNext = {};
|
|
uint32_t subpassCount = {};
|
|
const uint32_t * pViewMasks = {};
|
|
uint32_t dependencyCount = {};
|
|
const int32_t * pViewOffsets = {};
|
|
uint32_t correlationMaskCount = {};
|
|
const uint32_t * pCorrelationMasks = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo ) == sizeof( VkRenderPassMultiviewCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo>::value, "RenderPassMultiviewCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eRenderPassMultiviewCreateInfo>
|
|
{
|
|
using Type = RenderPassMultiviewCreateInfo;
|
|
};
|
|
using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo;
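  // A minimal sketch of a two-view multiview setup for a single-subpass render pass created
  // through the original VkRenderPassCreateInfo path (with RenderPassCreateInfo2 the per-subpass
  // viewMask member is used instead); names are illustrative:
  //
  //   uint32_t viewMask = 3;   // bits 0 and 1 -> render to views 0 and 1
  //   vk::RenderPassMultiviewCreateInfo multiviewInfo( viewMask );
  //   renderPassCreateInfo.setPNext( &multiviewInfo );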
struct SubpassSampleLocationsEXT
|
|
{
|
|
using NativeType = VkSubpassSampleLocationsEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT(uint32_t subpassIndex_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: subpassIndex( subpassIndex_ ), sampleLocationsInfo( sampleLocationsInfo_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassSampleLocationsEXT( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SubpassSampleLocationsEXT( *reinterpret_cast<SubpassSampleLocationsEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SubpassSampleLocationsEXT & operator=( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassSampleLocationsEXT & operator=( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT & setSubpassIndex( uint32_t subpassIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
subpassIndex = subpassIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
sampleLocationsInfo = sampleLocationsInfo_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
explicit operator VkSubpassSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSubpassSampleLocationsEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkSubpassSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSubpassSampleLocationsEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( subpassIndex, sampleLocationsInfo );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SubpassSampleLocationsEXT const & ) const = default;
|
|
#else
|
|
bool operator==( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( subpassIndex == rhs.subpassIndex )
|
|
&& ( sampleLocationsInfo == rhs.sampleLocationsInfo );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t subpassIndex = {};
|
|
VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT ) == sizeof( VkSubpassSampleLocationsEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT>::value, "SubpassSampleLocationsEXT is not nothrow_move_constructible!" );
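  // A minimal sketch of one per-subpass entry, assuming "sampleLocationsInfo" is a previously
  // filled vk::SampleLocationsInfoEXT intended for subpass 0:
  //
  //   vk::SubpassSampleLocationsEXT subpassLocations( 0, sampleLocationsInfo );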
struct RenderPassSampleLocationsBeginInfoEXT
|
|
{
|
|
using NativeType = VkRenderPassSampleLocationsBeginInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT(uint32_t attachmentInitialSampleLocationsCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ = {}, uint32_t postSubpassSampleLocationsCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ ), pAttachmentInitialSampleLocations( pAttachmentInitialSampleLocations_ ), postSubpassSampleLocationsCount( postSubpassSampleLocationsCount_ ), pPostSubpassSampleLocations( pPostSubpassSampleLocations_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RenderPassSampleLocationsBeginInfoEXT( *reinterpret_cast<RenderPassSampleLocationsBeginInfoEXT const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassSampleLocationsBeginInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT> const & attachmentInitialSampleLocations_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT> const & postSubpassSampleLocations_ = {} )
|
|
: attachmentInitialSampleLocationsCount( static_cast<uint32_t>( attachmentInitialSampleLocations_.size() ) ), pAttachmentInitialSampleLocations( attachmentInitialSampleLocations_.data() ), postSubpassSampleLocationsCount( static_cast<uint32_t>( postSubpassSampleLocations_.size() ) ), pPostSubpassSampleLocations( postSubpassSampleLocations_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RenderPassSampleLocationsBeginInfoEXT & operator=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderPassSampleLocationsBeginInfoEXT & operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocationsCount( uint32_t attachmentInitialSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentInitialSampleLocationsCount = attachmentInitialSampleLocationsCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setPAttachmentInitialSampleLocations( const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAttachmentInitialSampleLocations = pAttachmentInitialSampleLocations_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT> const & attachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
attachmentInitialSampleLocationsCount = static_cast<uint32_t>( attachmentInitialSampleLocations_.size() );
|
|
pAttachmentInitialSampleLocations = attachmentInitialSampleLocations_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
postSubpassSampleLocationsCount = postSubpassSampleLocationsCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setPPostSubpassSampleLocations( const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pPostSubpassSampleLocations = pPostSubpassSampleLocations_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT> const & postSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
postSubpassSampleLocationsCount = static_cast<uint32_t>( postSubpassSampleLocations_.size() );
|
|
pPostSubpassSampleLocations = postSubpassSampleLocations_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
explicit operator VkRenderPassSampleLocationsBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRenderPassSampleLocationsBeginInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkRenderPassSampleLocationsBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRenderPassSampleLocationsBeginInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, attachmentInitialSampleLocationsCount, pAttachmentInitialSampleLocations, postSubpassSampleLocationsCount, pPostSubpassSampleLocations );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( RenderPassSampleLocationsBeginInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( attachmentInitialSampleLocationsCount == rhs.attachmentInitialSampleLocationsCount )
|
|
&& ( pAttachmentInitialSampleLocations == rhs.pAttachmentInitialSampleLocations )
|
|
&& ( postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount )
|
|
&& ( pPostSubpassSampleLocations == rhs.pPostSubpassSampleLocations );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
|
|
const void * pNext = {};
|
|
uint32_t attachmentInitialSampleLocationsCount = {};
|
|
const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations = {};
|
|
uint32_t postSubpassSampleLocationsCount = {};
|
|
const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT ) == sizeof( VkRenderPassSampleLocationsBeginInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT>::value, "RenderPassSampleLocationsBeginInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eRenderPassSampleLocationsBeginInfoEXT>
|
|
{
|
|
using Type = RenderPassSampleLocationsBeginInfoEXT;
|
|
};
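  // A minimal sketch of chaining sample locations into a render pass begin, assuming the
  // "subpassLocations" entry from the sketch above and a vk::RenderPassBeginInfo "beginInfo":
  //
  //   vk::RenderPassSampleLocationsBeginInfoEXT sampleLocationsBegin( {}, subpassLocations );
  //   beginInfo.setPNext( &sampleLocationsBegin );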
struct RenderPassTransformBeginInfoQCOM
|
|
{
|
|
using NativeType = VkRenderPassTransformBeginInfoQCOM;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassTransformBeginInfoQCOM;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity) VULKAN_HPP_NOEXCEPT
|
|
: transform( transform_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderPassTransformBeginInfoQCOM( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RenderPassTransformBeginInfoQCOM( *reinterpret_cast<RenderPassTransformBeginInfoQCOM const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RenderPassTransformBeginInfoQCOM & operator=( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderPassTransformBeginInfoQCOM & operator=( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassTransformBeginInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderPassTransformBeginInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
transform = transform_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
explicit operator VkRenderPassTransformBeginInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRenderPassTransformBeginInfoQCOM*>( this );
|
|
}
|
|
|
|
explicit operator VkRenderPassTransformBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRenderPassTransformBeginInfoQCOM*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, transform );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( RenderPassTransformBeginInfoQCOM const & ) const = default;
|
|
#else
|
|
bool operator==( RenderPassTransformBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( transform == rhs.transform );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( RenderPassTransformBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassTransformBeginInfoQCOM;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM ) == sizeof( VkRenderPassTransformBeginInfoQCOM ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM>::value, "RenderPassTransformBeginInfoQCOM is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eRenderPassTransformBeginInfoQCOM>
|
|
{
|
|
using Type = RenderPassTransformBeginInfoQCOM;
|
|
};
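  // A minimal sketch for QCOM render pass transform (pre-rotation), assuming the
  // VK_QCOM_render_pass_transform extension is enabled and "preTransform" is the surface
  // pre-transform the swapchain was created with:
  //
  //   vk::RenderPassTransformBeginInfoQCOM transformBegin( preTransform );
  //   renderPassBeginInfo.setPNext( &transformBegin );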
struct RenderingAttachmentInfo
|
|
{
|
|
using NativeType = VkRenderingAttachmentInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingAttachmentInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo(VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, VULKAN_HPP_NAMESPACE::ImageView resolveImageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: imageView( imageView_ ), imageLayout( imageLayout_ ), resolveMode( resolveMode_ ), resolveImageView( resolveImageView_ ), resolveImageLayout( resolveImageLayout_ ), loadOp( loadOp_ ), storeOp( storeOp_ ), clearValue( clearValue_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo( RenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderingAttachmentInfo( VkRenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RenderingAttachmentInfo( *reinterpret_cast<RenderingAttachmentInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RenderingAttachmentInfo & operator=( RenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderingAttachmentInfo & operator=( VkRenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageView = imageView_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageLayout = imageLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
resolveMode = resolveMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveImageView( VULKAN_HPP_NAMESPACE::ImageView resolveImageView_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
resolveImageView = resolveImageView_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
resolveImageLayout = resolveImageLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
loadOp = loadOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
storeOp = storeOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
clearValue = clearValue_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
explicit operator VkRenderingAttachmentInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRenderingAttachmentInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkRenderingAttachmentInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRenderingAttachmentInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageView const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ResolveModeFlagBits const &, VULKAN_HPP_NAMESPACE::ImageView const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::AttachmentLoadOp const &, VULKAN_HPP_NAMESPACE::AttachmentStoreOp const &, VULKAN_HPP_NAMESPACE::ClearValue const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, imageView, imageLayout, resolveMode, resolveImageView, resolveImageLayout, loadOp, storeOp, clearValue );
|
|
}
|
|
#endif
|
|
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingAttachmentInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageView imageView = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
|
|
VULKAN_HPP_NAMESPACE::ImageView resolveImageView = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
|
|
VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
|
|
VULKAN_HPP_NAMESPACE::ClearValue clearValue = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo ) == sizeof( VkRenderingAttachmentInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo>::value, "RenderingAttachmentInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eRenderingAttachmentInfo>
|
|
{
|
|
using Type = RenderingAttachmentInfo;
|
|
};
using RenderingAttachmentInfoKHR = RenderingAttachmentInfo;
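
  // Illustrative usage sketch (hand-written, not generated from the registry): filling in a color
  // attachment for dynamic rendering with the fluent setters above. The image view handle and the
  // clear color are assumptions for the example; the block is fenced with #if 0 so it does not
  // affect compilation.
#if 0
  RenderingAttachmentInfo makeColorAttachmentExample( ImageView colorImageView )
  {
    // Clear to opaque black on load, keep the result after the pass.
    return RenderingAttachmentInfo{}
      .setImageView( colorImageView )
      .setImageLayout( ImageLayout::eColorAttachmentOptimal )
      .setLoadOp( AttachmentLoadOp::eClear )
      .setStoreOp( AttachmentStoreOp::eStore )
      .setClearValue( ClearColorValue( std::array<float, 4>{ { 0.0f, 0.0f, 0.0f, 1.0f } } ) );
  }
#endif
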
struct RenderingFragmentDensityMapAttachmentInfoEXT
|
|
{
|
|
using NativeType = VkRenderingFragmentDensityMapAttachmentInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR RenderingFragmentDensityMapAttachmentInfoEXT(VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
|
|
: imageView( imageView_ ), imageLayout( imageLayout_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR RenderingFragmentDensityMapAttachmentInfoEXT( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderingFragmentDensityMapAttachmentInfoEXT( VkRenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RenderingFragmentDensityMapAttachmentInfoEXT( *reinterpret_cast<RenderingFragmentDensityMapAttachmentInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RenderingFragmentDensityMapAttachmentInfoEXT & operator=( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderingFragmentDensityMapAttachmentInfoEXT & operator=( VkRenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageView = imageView_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageLayout = imageLayout_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkRenderingFragmentDensityMapAttachmentInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRenderingFragmentDensityMapAttachmentInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkRenderingFragmentDensityMapAttachmentInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRenderingFragmentDensityMapAttachmentInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageView const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, imageView, imageLayout );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( RenderingFragmentDensityMapAttachmentInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( imageView == rhs.imageView )
|
|
&& ( imageLayout == rhs.imageLayout );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageView imageView = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT ) == sizeof( VkRenderingFragmentDensityMapAttachmentInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT>::value, "RenderingFragmentDensityMapAttachmentInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT>
|
|
{
|
|
using Type = RenderingFragmentDensityMapAttachmentInfoEXT;
|
|
};
struct RenderingFragmentShadingRateAttachmentInfoKHR
|
|
{
|
|
using NativeType = VkRenderingFragmentShadingRateAttachmentInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR RenderingFragmentShadingRateAttachmentInfoKHR(VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: imageView( imageView_ ), imageLayout( imageLayout_ ), shadingRateAttachmentTexelSize( shadingRateAttachmentTexelSize_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR RenderingFragmentShadingRateAttachmentInfoKHR( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderingFragmentShadingRateAttachmentInfoKHR( VkRenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RenderingFragmentShadingRateAttachmentInfoKHR( *reinterpret_cast<RenderingFragmentShadingRateAttachmentInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RenderingFragmentShadingRateAttachmentInfoKHR & operator=( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderingFragmentShadingRateAttachmentInfoKHR & operator=( VkRenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageView = imageView_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageLayout = imageLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setShadingRateAttachmentTexelSize( VULKAN_HPP_NAMESPACE::Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkRenderingFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRenderingFragmentShadingRateAttachmentInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkRenderingFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRenderingFragmentShadingRateAttachmentInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageView const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, imageView, imageLayout, shadingRateAttachmentTexelSize );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( RenderingFragmentShadingRateAttachmentInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( imageView == rhs.imageView )
|
|
&& ( imageLayout == rhs.imageLayout )
|
|
&& ( shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageView imageView = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR ) == sizeof( VkRenderingFragmentShadingRateAttachmentInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR>::value, "RenderingFragmentShadingRateAttachmentInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR>
|
|
{
|
|
using Type = RenderingFragmentShadingRateAttachmentInfoKHR;
|
|
};
struct RenderingInfo
|
|
{
|
|
using NativeType = VkRenderingInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingInfo(VULKAN_HPP_NAMESPACE::RenderingFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, uint32_t layerCount_ = {}, uint32_t viewMask_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments_ = {}, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ = {}, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), renderArea( renderArea_ ), layerCount( layerCount_ ), viewMask( viewMask_ ), colorAttachmentCount( colorAttachmentCount_ ), pColorAttachments( pColorAttachments_ ), pDepthAttachment( pDepthAttachment_ ), pStencilAttachment( pStencilAttachment_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingInfo( RenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderingInfo( VkRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: RenderingInfo( *reinterpret_cast<RenderingInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_, VULKAN_HPP_NAMESPACE::Rect2D renderArea_, uint32_t layerCount_, uint32_t viewMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo> const & colorAttachments_, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ = {}, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ = {} )
|
|
: flags( flags_ ), renderArea( renderArea_ ), layerCount( layerCount_ ), viewMask( viewMask_ ), colorAttachmentCount( static_cast<uint32_t>( colorAttachments_.size() ) ), pColorAttachments( colorAttachments_.data() ), pDepthAttachment( pDepthAttachment_ ), pStencilAttachment( pStencilAttachment_ )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
RenderingInfo & operator=( RenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
RenderingInfo & operator=( VkRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderingInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
renderArea = renderArea_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
layerCount = layerCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
viewMask = viewMask_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorAttachmentCount = colorAttachmentCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPColorAttachments( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pColorAttachments = pColorAttachments_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
RenderingInfo & setColorAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo> const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
|
|
pColorAttachments = colorAttachments_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPDepthAttachment( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDepthAttachment = pDepthAttachment_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPStencilAttachment( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pStencilAttachment = pStencilAttachment_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkRenderingInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkRenderingInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkRenderingInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkRenderingInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RenderingFlags const &, VULKAN_HPP_NAMESPACE::Rect2D const &, uint32_t const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * const &, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * const &, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, renderArea, layerCount, viewMask, colorAttachmentCount, pColorAttachments, pDepthAttachment, pStencilAttachment );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( RenderingInfo const & ) const = default;
|
|
#else
|
|
bool operator==( RenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( renderArea == rhs.renderArea )
|
|
&& ( layerCount == rhs.layerCount )
|
|
&& ( viewMask == rhs.viewMask )
|
|
&& ( colorAttachmentCount == rhs.colorAttachmentCount )
|
|
&& ( pColorAttachments == rhs.pColorAttachments )
|
|
&& ( pDepthAttachment == rhs.pDepthAttachment )
|
|
&& ( pStencilAttachment == rhs.pStencilAttachment );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( RenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::RenderingFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
|
|
uint32_t layerCount = {};
|
|
uint32_t viewMask = {};
|
|
uint32_t colorAttachmentCount = {};
|
|
const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments = {};
|
|
const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment = {};
|
|
const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingInfo ) == sizeof( VkRenderingInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingInfo>::value, "RenderingInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eRenderingInfo>
|
|
{
|
|
using Type = RenderingInfo;
|
|
};
|
|
using RenderingInfoKHR = RenderingInfo;
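
  // Illustrative usage sketch (hand-written, not generated from the registry): recording a dynamic
  // rendering pass with a single color attachment. The command buffer, image view and 1920x1080
  // render area are assumptions; setColorAttachments() fills colorAttachmentCount and
  // pColorAttachments from the proxy. Fenced with #if 0 so it does not affect compilation.
#if 0
  void recordDynamicRenderingExample( CommandBuffer commandBuffer, ImageView colorImageView )
  {
    RenderingAttachmentInfo colorAttachment = RenderingAttachmentInfo{}
                                                .setImageView( colorImageView )
                                                .setImageLayout( ImageLayout::eColorAttachmentOptimal )
                                                .setLoadOp( AttachmentLoadOp::eClear )
                                                .setStoreOp( AttachmentStoreOp::eStore );

    RenderingInfo renderingInfo = RenderingInfo{}
                                    .setRenderArea( Rect2D( Offset2D( 0, 0 ), Extent2D( 1920, 1080 ) ) )
                                    .setLayerCount( 1 )
                                    .setColorAttachments( colorAttachment );  // stores a pointer to the local above

    commandBuffer.beginRendering( renderingInfo );  // requires dynamicRendering (core in Vulkan 1.3)
    // ... draw calls ...
    commandBuffer.endRendering();
  }
#endif
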
struct ResolveImageInfo2
|
|
{
|
|
using NativeType = VkResolveImageInfo2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eResolveImageInfo2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ResolveImageInfo2(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ResolveImageInfo2( ResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ResolveImageInfo2( VkResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ResolveImageInfo2( *reinterpret_cast<ResolveImageInfo2 const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ResolveImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2> const & regions_ )
|
|
: srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ResolveImageInfo2 & operator=( ResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ResolveImageInfo2 & operator=( VkResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ResolveImageInfo2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcImage = srcImage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcImageLayout = srcImageLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstImage = dstImage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstImageLayout = dstImageLayout_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
regionCount = regionCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pRegions = pRegions_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ResolveImageInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2> const & regions_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
regionCount = static_cast<uint32_t>( regions_.size() );
|
|
pRegions = regions_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkResolveImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkResolveImageInfo2*>( this );
|
|
}
|
|
|
|
explicit operator VkResolveImageInfo2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkResolveImageInfo2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ImageResolve2 * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ResolveImageInfo2 const & ) const = default;
|
|
#else
|
|
bool operator==( ResolveImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( srcImage == rhs.srcImage )
|
|
&& ( srcImageLayout == rhs.srcImageLayout )
|
|
&& ( dstImage == rhs.dstImage )
|
|
&& ( dstImageLayout == rhs.dstImageLayout )
|
|
&& ( regionCount == rhs.regionCount )
|
|
&& ( pRegions == rhs.pRegions );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ResolveImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eResolveImageInfo2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Image srcImage = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::Image dstImage = {};
|
|
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
|
|
uint32_t regionCount = {};
|
|
const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ResolveImageInfo2 ) == sizeof( VkResolveImageInfo2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ResolveImageInfo2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ResolveImageInfo2>::value, "ResolveImageInfo2 is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eResolveImageInfo2>
|
|
{
|
|
using Type = ResolveImageInfo2;
|
|
};
|
|
using ResolveImageInfo2KHR = ResolveImageInfo2;
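
  // Illustrative usage sketch (hand-written, not generated from the registry): resolving a
  // multisampled color image into a single-sampled image via vkCmdResolveImage2. The image
  // handles, layouts and 1920x1080 extent are assumptions. Fenced with #if 0 so it does not
  // affect compilation.
#if 0
  void resolveColorExample( CommandBuffer commandBuffer, Image msaaImage, Image resolvedImage )
  {
    // One full-image region: color aspect, mip 0, single array layer.
    ImageResolve2 region = ImageResolve2{}
                             .setSrcSubresource( ImageSubresourceLayers( ImageAspectFlagBits::eColor, 0, 0, 1 ) )
                             .setDstSubresource( ImageSubresourceLayers( ImageAspectFlagBits::eColor, 0, 0, 1 ) )
                             .setExtent( Extent3D( 1920, 1080, 1 ) );

    ResolveImageInfo2 resolveInfo = ResolveImageInfo2{}
                                      .setSrcImage( msaaImage )
                                      .setSrcImageLayout( ImageLayout::eTransferSrcOptimal )
                                      .setDstImage( resolvedImage )
                                      .setDstImageLayout( ImageLayout::eTransferDstOptimal )
                                      .setRegions( region );  // stores a pointer to the local region

    commandBuffer.resolveImage2( resolveInfo );
  }
#endif
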
struct SamplerBorderColorComponentMappingCreateInfoEXT
|
|
{
|
|
using NativeType = VkSamplerBorderColorComponentMappingCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SamplerBorderColorComponentMappingCreateInfoEXT(VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, VULKAN_HPP_NAMESPACE::Bool32 srgb_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: components( components_ ), srgb( srgb_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SamplerBorderColorComponentMappingCreateInfoEXT( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SamplerBorderColorComponentMappingCreateInfoEXT( VkSamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SamplerBorderColorComponentMappingCreateInfoEXT( *reinterpret_cast<SamplerBorderColorComponentMappingCreateInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SamplerBorderColorComponentMappingCreateInfoEXT & operator=( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SamplerBorderColorComponentMappingCreateInfoEXT & operator=( VkSamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
components = components_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & setSrgb( VULKAN_HPP_NAMESPACE::Bool32 srgb_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srgb = srgb_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSamplerBorderColorComponentMappingCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSamplerBorderColorComponentMappingCreateInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkSamplerBorderColorComponentMappingCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSamplerBorderColorComponentMappingCreateInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ComponentMapping const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, components, srgb );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SamplerBorderColorComponentMappingCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( components == rhs.components )
|
|
&& ( srgb == rhs.srgb );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 srgb = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT ) == sizeof( VkSamplerBorderColorComponentMappingCreateInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT>::value, "SamplerBorderColorComponentMappingCreateInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT>
|
|
{
|
|
using Type = SamplerBorderColorComponentMappingCreateInfoEXT;
|
|
};
struct SamplerCreateInfo
|
|
{
|
|
using NativeType = VkSamplerCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SamplerCreateInfo(VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Filter magFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, VULKAN_HPP_NAMESPACE::Filter minFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest, VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, float mipLodBias_ = {}, VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ = {}, float maxAnisotropy_ = {}, VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ = {}, VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, float minLod_ = {}, float maxLod_ = {}, VULKAN_HPP_NAMESPACE::BorderColor borderColor_ = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack, VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), magFilter( magFilter_ ), minFilter( minFilter_ ), mipmapMode( mipmapMode_ ), addressModeU( addressModeU_ ), addressModeV( addressModeV_ ), addressModeW( addressModeW_ ), mipLodBias( mipLodBias_ ), anisotropyEnable( anisotropyEnable_ ), maxAnisotropy( maxAnisotropy_ ), compareEnable( compareEnable_ ), compareOp( compareOp_ ), minLod( minLod_ ), maxLod( maxLod_ ), borderColor( borderColor_ ), unnormalizedCoordinates( unnormalizedCoordinates_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SamplerCreateInfo( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SamplerCreateInfo( *reinterpret_cast<SamplerCreateInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SamplerCreateInfo & operator=( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SamplerCreateInfo & operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMagFilter( VULKAN_HPP_NAMESPACE::Filter magFilter_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
magFilter = magFilter_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMinFilter( VULKAN_HPP_NAMESPACE::Filter minFilter_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
minFilter = minFilter_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMipmapMode( VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
mipmapMode = mipmapMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeU( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
addressModeU = addressModeU_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeV( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
addressModeV = addressModeV_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeW( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
addressModeW = addressModeW_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMipLodBias( float mipLodBias_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
mipLodBias = mipLodBias_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAnisotropyEnable( VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
anisotropyEnable = anisotropyEnable_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMaxAnisotropy( float maxAnisotropy_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxAnisotropy = maxAnisotropy_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setCompareEnable( VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
compareEnable = compareEnable_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
compareOp = compareOp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
minLod = minLod_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMaxLod( float maxLod_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxLod = maxLod_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setBorderColor( VULKAN_HPP_NAMESPACE::BorderColor borderColor_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
borderColor = borderColor_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setUnnormalizedCoordinates( VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
unnormalizedCoordinates = unnormalizedCoordinates_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSamplerCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSamplerCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSamplerCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SamplerCreateFlags const &, VULKAN_HPP_NAMESPACE::Filter const &, VULKAN_HPP_NAMESPACE::Filter const &, VULKAN_HPP_NAMESPACE::SamplerMipmapMode const &, VULKAN_HPP_NAMESPACE::SamplerAddressMode const &, VULKAN_HPP_NAMESPACE::SamplerAddressMode const &, VULKAN_HPP_NAMESPACE::SamplerAddressMode const &, float const &, VULKAN_HPP_NAMESPACE::Bool32 const &, float const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::CompareOp const &, float const &, float const &, VULKAN_HPP_NAMESPACE::BorderColor const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, magFilter, minFilter, mipmapMode, addressModeU, addressModeV, addressModeW, mipLodBias, anisotropyEnable, maxAnisotropy, compareEnable, compareOp, minLod, maxLod, borderColor, unnormalizedCoordinates );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SamplerCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( magFilter == rhs.magFilter )
|
|
&& ( minFilter == rhs.minFilter )
|
|
&& ( mipmapMode == rhs.mipmapMode )
|
|
&& ( addressModeU == rhs.addressModeU )
|
|
&& ( addressModeV == rhs.addressModeV )
|
|
&& ( addressModeW == rhs.addressModeW )
|
|
&& ( mipLodBias == rhs.mipLodBias )
|
|
&& ( anisotropyEnable == rhs.anisotropyEnable )
|
|
&& ( maxAnisotropy == rhs.maxAnisotropy )
|
|
&& ( compareEnable == rhs.compareEnable )
|
|
&& ( compareOp == rhs.compareOp )
|
|
&& ( minLod == rhs.minLod )
|
|
&& ( maxLod == rhs.maxLod )
|
|
&& ( borderColor == rhs.borderColor )
|
|
&& ( unnormalizedCoordinates == rhs.unnormalizedCoordinates );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags = {};
|
|
VULKAN_HPP_NAMESPACE::Filter magFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
|
|
VULKAN_HPP_NAMESPACE::Filter minFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
|
|
VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest;
|
|
VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
|
|
VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
|
|
VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
|
|
float mipLodBias = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable = {};
|
|
float maxAnisotropy = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 compareEnable = {};
|
|
VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
|
|
float minLod = {};
|
|
float maxLod = {};
|
|
VULKAN_HPP_NAMESPACE::BorderColor borderColor = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack;
|
|
VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerCreateInfo>::value, "SamplerCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSamplerCreateInfo>
|
|
{
|
|
using Type = SamplerCreateInfo;
|
|
};
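
  // Illustrative usage sketch (hand-written, not generated from the registry): creating a
  // trilinear repeat sampler. Assumes the default exceptions-enabled configuration, in which
  // Device::createSampler returns the created handle directly. Fenced with #if 0 so it does not
  // affect compilation.
#if 0
  Sampler createLinearSamplerExample( Device device )
  {
    SamplerCreateInfo createInfo = SamplerCreateInfo{}
                                     .setMagFilter( Filter::eLinear )
                                     .setMinFilter( Filter::eLinear )
                                     .setMipmapMode( SamplerMipmapMode::eLinear )
                                     .setAddressModeU( SamplerAddressMode::eRepeat )
                                     .setAddressModeV( SamplerAddressMode::eRepeat )
                                     .setAddressModeW( SamplerAddressMode::eRepeat )
                                     .setMaxLod( VK_LOD_CLAMP_NONE );  // no upper LOD clamp
    return device.createSampler( createInfo );
  }
#endif
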
struct SamplerCustomBorderColorCreateInfoEXT
|
|
{
|
|
using NativeType = VkSamplerCustomBorderColorCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCustomBorderColorCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT(VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined) VULKAN_HPP_NOEXCEPT
|
|
: customBorderColor( customBorderColor_ ), format( format_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SamplerCustomBorderColorCreateInfoEXT( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SamplerCustomBorderColorCreateInfoEXT( *reinterpret_cast<SamplerCustomBorderColorCreateInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SamplerCustomBorderColorCreateInfoEXT & operator=( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SamplerCustomBorderColorCreateInfoEXT & operator=( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & setCustomBorderColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & customBorderColor_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
customBorderColor = customBorderColor_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
format = format_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSamplerCustomBorderColorCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSamplerCustomBorderColorCreateInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkSamplerCustomBorderColorCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSamplerCustomBorderColorCreateInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ClearColorValue const &, VULKAN_HPP_NAMESPACE::Format const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, customBorderColor, format );
|
|
}
|
|
#endif
|
|
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCustomBorderColorCreateInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor = {};
|
|
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT ) == sizeof( VkSamplerCustomBorderColorCreateInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT>::value, "SamplerCustomBorderColorCreateInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSamplerCustomBorderColorCreateInfoEXT>
|
|
{
|
|
using Type = SamplerCustomBorderColorCreateInfoEXT;
|
|
};
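
  // Illustrative usage sketch (hand-written, not generated from the registry): chaining a custom
  // border color into sampler creation via pNext. Requires VK_EXT_custom_border_color with the
  // customBorderColors feature enabled; the opaque-red color is an assumption. Fenced with #if 0
  // so it does not affect compilation.
#if 0
  Sampler createCustomBorderSamplerExample( Device device )
  {
    SamplerCustomBorderColorCreateInfoEXT customBorderColor =
      SamplerCustomBorderColorCreateInfoEXT{}
        .setCustomBorderColor( ClearColorValue( std::array<float, 4>{ { 1.0f, 0.0f, 0.0f, 1.0f } } ) )
        .setFormat( Format::eR8G8B8A8Unorm );

    SamplerCreateInfo createInfo = SamplerCreateInfo{}
                                     .setAddressModeU( SamplerAddressMode::eClampToBorder )
                                     .setAddressModeV( SamplerAddressMode::eClampToBorder )
                                     .setAddressModeW( SamplerAddressMode::eClampToBorder )
                                     .setBorderColor( BorderColor::eFloatCustomEXT )
                                     .setPNext( &customBorderColor );  // extension struct stays alive until creation
    return device.createSampler( createInfo );
  }
#endif
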
  struct SamplerReductionModeCreateInfo
  {
    using NativeType = VkSamplerReductionModeCreateInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerReductionModeCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo(VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage) VULKAN_HPP_NOEXCEPT
      : reductionMode( reductionMode_ )
    {}

    VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SamplerReductionModeCreateInfo( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SamplerReductionModeCreateInfo( *reinterpret_cast<SamplerReductionModeCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    SamplerReductionModeCreateInfo & operator=( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SamplerReductionModeCreateInfo & operator=( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo & setReductionMode( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ ) VULKAN_HPP_NOEXCEPT
    {
      reductionMode = reductionMode_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkSamplerReductionModeCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSamplerReductionModeCreateInfo*>( this );
    }

    explicit operator VkSamplerReductionModeCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSamplerReductionModeCreateInfo*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SamplerReductionMode const &>
#endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, reductionMode );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( SamplerReductionModeCreateInfo const & ) const = default;
#else
    bool operator==( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( reductionMode == rhs.reductionMode );
#endif
    }

    bool operator!=( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerReductionModeCreateInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage;
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo ) == sizeof( VkSamplerReductionModeCreateInfo ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo>::value, "SamplerReductionModeCreateInfo is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eSamplerReductionModeCreateInfo>
  {
    using Type = SamplerReductionModeCreateInfo;
  };
using SamplerReductionModeCreateInfoEXT = SamplerReductionModeCreateInfo;
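
  // Illustrative usage sketch (hand-written, not generated from the registry): chaining a
  // min-reduction mode into sampler creation via pNext, e.g. for hierarchical depth sampling.
  // Fenced with #if 0 so it does not affect compilation.
#if 0
  Sampler createMinReductionSamplerExample( Device device )
  {
    SamplerReductionModeCreateInfo reduction =
      SamplerReductionModeCreateInfo{}.setReductionMode( SamplerReductionMode::eMin );

    SamplerCreateInfo createInfo = SamplerCreateInfo{}
                                     .setMagFilter( Filter::eLinear )
                                     .setMinFilter( Filter::eLinear )
                                     .setPNext( &reduction );  // reduction struct stays alive until creation
    return device.createSampler( createInfo );
  }
#endif
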
struct SamplerYcbcrConversionCreateInfo
|
|
{
|
|
using NativeType = VkSamplerYcbcrConversionCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::Filter chromaFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: format( format_ ), ycbcrModel( ycbcrModel_ ), ycbcrRange( ycbcrRange_ ), components( components_ ), xChromaOffset( xChromaOffset_ ), yChromaOffset( yChromaOffset_ ), chromaFilter( chromaFilter_ ), forceExplicitReconstruction( forceExplicitReconstruction_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SamplerYcbcrConversionCreateInfo( *reinterpret_cast<SamplerYcbcrConversionCreateInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SamplerYcbcrConversionCreateInfo & operator=( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
    SamplerYcbcrConversionCreateInfo & operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT
    {
      ycbcrModel = ycbcrModel_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ ) VULKAN_HPP_NOEXCEPT
    {
      ycbcrRange = ycbcrRange_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
    {
      components = components_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      xChromaOffset = xChromaOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      yChromaOffset = yChromaOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setChromaFilter( VULKAN_HPP_NAMESPACE::Filter chromaFilter_ ) VULKAN_HPP_NOEXCEPT
    {
      chromaFilter = chromaFilter_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setForceExplicitReconstruction( VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT
    {
      forceExplicitReconstruction = forceExplicitReconstruction_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkSamplerYcbcrConversionCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( this );
    }

    explicit operator VkSamplerYcbcrConversionCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSamplerYcbcrConversionCreateInfo*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange const &, VULKAN_HPP_NAMESPACE::ComponentMapping const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &, VULKAN_HPP_NAMESPACE::Filter const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, format, ycbcrModel, ycbcrRange, components, xChromaOffset, yChromaOffset, chromaFilter, forceExplicitReconstruction );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( SamplerYcbcrConversionCreateInfo const & ) const = default;
#else
    bool operator==( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( format == rhs.format )
          && ( ycbcrModel == rhs.ycbcrModel )
          && ( ycbcrRange == rhs.ycbcrRange )
          && ( components == rhs.components )
          && ( xChromaOffset == rhs.xChromaOffset )
          && ( yChromaOffset == rhs.yChromaOffset )
          && ( chromaFilter == rhs.chromaFilter )
          && ( forceExplicitReconstruction == rhs.forceExplicitReconstruction );
#endif
    }

    bool operator!=( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
    VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
    VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
    VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
    VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
    VULKAN_HPP_NAMESPACE::Filter chromaFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
    VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo>::value, "SamplerYcbcrConversionCreateInfo is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eSamplerYcbcrConversionCreateInfo>
  {
    using Type = SamplerYcbcrConversionCreateInfo;
  };
  using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo;
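
  // Usage sketch (illustrative only, not part of the generated header): filling in a
  // SamplerYcbcrConversionCreateInfo with the chained setters and creating the conversion
  // object. Assumes a valid vk::Device `device`, a device that supports the chosen
  // multi-planar format, and the default vulkan.hpp configuration (exceptions enabled);
  // all variable names are placeholders.
  //
  //   vk::SamplerYcbcrConversionCreateInfo conversionInfo = vk::SamplerYcbcrConversionCreateInfo{}
  //                                                           .setFormat( vk::Format::eG8B8R82Plane420Unorm )
  //                                                           .setYcbcrModel( vk::SamplerYcbcrModelConversion::eYcbcr709 )
  //                                                           .setYcbcrRange( vk::SamplerYcbcrRange::eItuNarrow )
  //                                                           .setChromaFilter( vk::Filter::eLinear );
  //   vk::SamplerYcbcrConversion conversion = device.createSamplerYcbcrConversion( conversionInfo );
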
  struct SamplerYcbcrConversionImageFormatProperties
  {
    using NativeType = VkSamplerYcbcrConversionImageFormatProperties;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionImageFormatProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties(uint32_t combinedImageSamplerDescriptorCount_ = {}) VULKAN_HPP_NOEXCEPT
      : combinedImageSamplerDescriptorCount( combinedImageSamplerDescriptorCount_ )
    {}

    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SamplerYcbcrConversionImageFormatProperties( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : SamplerYcbcrConversionImageFormatProperties( *reinterpret_cast<SamplerYcbcrConversionImageFormatProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    SamplerYcbcrConversionImageFormatProperties & operator=( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SamplerYcbcrConversionImageFormatProperties & operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties const *>( &rhs );
      return *this;
    }

    explicit operator VkSamplerYcbcrConversionImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSamplerYcbcrConversionImageFormatProperties*>( this );
    }

    explicit operator VkSamplerYcbcrConversionImageFormatProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, combinedImageSamplerDescriptorCount );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( SamplerYcbcrConversionImageFormatProperties const & ) const = default;
#else
    bool operator==( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount );
#endif
    }

    bool operator!=( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties;
    void * pNext = {};
    uint32_t combinedImageSamplerDescriptorCount = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties ) == sizeof( VkSamplerYcbcrConversionImageFormatProperties ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties>::value, "SamplerYcbcrConversionImageFormatProperties is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eSamplerYcbcrConversionImageFormatProperties>
  {
    using Type = SamplerYcbcrConversionImageFormatProperties;
  };
  using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties;
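
  // Usage sketch (illustrative only): SamplerYcbcrConversionImageFormatProperties is an output
  // structure, chained into vk::ImageFormatProperties2 when querying a multi-planar format.
  // Assumes a valid vk::PhysicalDevice `physicalDevice`; names are placeholders and error
  // handling is omitted.
  //
  //   vk::PhysicalDeviceImageFormatInfo2 formatInfo{ vk::Format::eG8B8R82Plane420Unorm,
  //                                                  vk::ImageType::e2D, vk::ImageTiling::eOptimal,
  //                                                  vk::ImageUsageFlagBits::eSampled };
  //   auto chain = physicalDevice.getImageFormatProperties2<vk::ImageFormatProperties2,
  //                                                         vk::SamplerYcbcrConversionImageFormatProperties>( formatInfo );
  //   uint32_t descriptorCount =
  //     chain.get<vk::SamplerYcbcrConversionImageFormatProperties>().combinedImageSamplerDescriptorCount;
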
  struct SamplerYcbcrConversionInfo
  {
    using NativeType = VkSamplerYcbcrConversionInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {}) VULKAN_HPP_NOEXCEPT
      : conversion( conversion_ )
    {}

    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SamplerYcbcrConversionInfo( *reinterpret_cast<SamplerYcbcrConversionInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    SamplerYcbcrConversionInfo & operator=( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SamplerYcbcrConversionInfo & operator=( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo & setConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ ) VULKAN_HPP_NOEXCEPT
    {
      conversion = conversion_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkSamplerYcbcrConversionInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSamplerYcbcrConversionInfo*>( this );
    }

    explicit operator VkSamplerYcbcrConversionInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSamplerYcbcrConversionInfo*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, conversion );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( SamplerYcbcrConversionInfo const & ) const = default;
#else
    bool operator==( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( conversion == rhs.conversion );
#endif
    }

    bool operator!=( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo ) == sizeof( VkSamplerYcbcrConversionInfo ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo>::value, "SamplerYcbcrConversionInfo is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eSamplerYcbcrConversionInfo>
  {
    using Type = SamplerYcbcrConversionInfo;
  };
  using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo;
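
  // Usage sketch (illustrative only): SamplerYcbcrConversionInfo carries a previously created
  // vk::SamplerYcbcrConversion through the pNext chain of vk::SamplerCreateInfo (and of the
  // matching vk::ImageViewCreateInfo). Assumes `device` and `conversion` from the sketch above.
  //
  //   vk::StructureChain<vk::SamplerCreateInfo, vk::SamplerYcbcrConversionInfo> samplerChain{
  //     vk::SamplerCreateInfo{}.setMagFilter( vk::Filter::eLinear ).setMinFilter( vk::Filter::eLinear ),
  //     vk::SamplerYcbcrConversionInfo{ conversion } };
  //   vk::Sampler ycbcrSampler = device.createSampler( samplerChain.get<vk::SamplerCreateInfo>() );
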
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
|
|
struct ScreenSurfaceCreateInfoQNX
|
|
{
|
|
using NativeType = VkScreenSurfaceCreateInfoQNX;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eScreenSurfaceCreateInfoQNX;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ScreenSurfaceCreateInfoQNX(VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags_ = {}, struct _screen_context * context_ = {}, struct _screen_window * window_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), context( context_ ), window( window_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ScreenSurfaceCreateInfoQNX( ScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ScreenSurfaceCreateInfoQNX( VkScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ScreenSurfaceCreateInfoQNX( *reinterpret_cast<ScreenSurfaceCreateInfoQNX const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ScreenSurfaceCreateInfoQNX & operator=( ScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ScreenSurfaceCreateInfoQNX & operator=( VkScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setFlags( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setContext( struct _screen_context * context_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
context = context_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setWindow( struct _screen_window * window_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
window = window_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkScreenSurfaceCreateInfoQNX const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkScreenSurfaceCreateInfoQNX*>( this );
|
|
}
|
|
|
|
explicit operator VkScreenSurfaceCreateInfoQNX &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkScreenSurfaceCreateInfoQNX*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX const &, struct _screen_context * const &, struct _screen_window * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, context, window );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ScreenSurfaceCreateInfoQNX const & ) const = default;
|
|
#else
|
|
bool operator==( ScreenSurfaceCreateInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( context == rhs.context )
|
|
&& ( window == rhs.window );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ScreenSurfaceCreateInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eScreenSurfaceCreateInfoQNX;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags = {};
|
|
struct _screen_context * context = {};
|
|
struct _screen_window * window = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX ) == sizeof( VkScreenSurfaceCreateInfoQNX ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX>::value, "ScreenSurfaceCreateInfoQNX is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eScreenSurfaceCreateInfoQNX>
|
|
{
|
|
using Type = ScreenSurfaceCreateInfoQNX;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
|
|
|
|
  struct SemaphoreCreateInfo
  {
    using NativeType = VkSemaphoreCreateInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo(VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
    {}

    VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SemaphoreCreateInfo( *reinterpret_cast<SemaphoreCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    SemaphoreCreateInfo & operator=( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SemaphoreCreateInfo & operator=( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSemaphoreCreateInfo*>( this );
    }

    explicit operator VkSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSemaphoreCreateInfo*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( SemaphoreCreateInfo const & ) const = default;
#else
    bool operator==( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags );
#endif
    }

    bool operator!=( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreCreateInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo>::value, "SemaphoreCreateInfo is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eSemaphoreCreateInfo>
  {
    using Type = SemaphoreCreateInfo;
  };
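
  // Usage sketch (illustrative only): creating a plain binary semaphore. Assumes a valid
  // vk::Device `device` and the default vulkan.hpp configuration (exceptions enabled).
  //
  //   vk::Semaphore imageAvailable = device.createSemaphore( vk::SemaphoreCreateInfo{} );
  //   // ... use it for swapchain acquisition / queue submission ...
  //   device.destroySemaphore( imageAvailable );
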
struct SemaphoreGetFdInfoKHR
|
|
{
|
|
using NativeType = VkSemaphoreGetFdInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetFdInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
|
|
: semaphore( semaphore_ ), handleType( handleType_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SemaphoreGetFdInfoKHR( *reinterpret_cast<SemaphoreGetFdInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SemaphoreGetFdInfoKHR & operator=( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SemaphoreGetFdInfoKHR & operator=( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
semaphore = semaphore_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSemaphoreGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSemaphoreGetFdInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkSemaphoreGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSemaphoreGetFdInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, semaphore, handleType );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SemaphoreGetFdInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( semaphore == rhs.semaphore )
|
|
&& ( handleType == rhs.handleType );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR>::value, "SemaphoreGetFdInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSemaphoreGetFdInfoKHR>
|
|
{
|
|
using Type = SemaphoreGetFdInfoKHR;
|
|
};
|
|
|
|
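
  // Usage sketch (illustrative only): exporting a semaphore payload as a POSIX file descriptor
  // via SemaphoreGetFdInfoKHR. Assumes the device was created with VK_KHR_external_semaphore_fd
  // enabled, the semaphore was created with a matching export handle type, and a dispatcher that
  // has loaded the extension entry points; `device` and `semaphore` are placeholders.
  //
  //   vk::SemaphoreGetFdInfoKHR getFdInfo{ semaphore, vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd };
  //   int fd = device.getSemaphoreFdKHR( getFdInfo );   // ownership of the fd passes to the caller
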
#if defined( VK_USE_PLATFORM_WIN32_KHR )
|
|
struct SemaphoreGetWin32HandleInfoKHR
|
|
{
|
|
using NativeType = VkSemaphoreGetWin32HandleInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetWin32HandleInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
|
|
: semaphore( semaphore_ ), handleType( handleType_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SemaphoreGetWin32HandleInfoKHR( *reinterpret_cast<SemaphoreGetWin32HandleInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SemaphoreGetWin32HandleInfoKHR & operator=( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SemaphoreGetWin32HandleInfoKHR & operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
semaphore = semaphore_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSemaphoreGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkSemaphoreGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSemaphoreGetWin32HandleInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, semaphore, handleType );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SemaphoreGetWin32HandleInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( SemaphoreGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( semaphore == rhs.semaphore )
|
|
&& ( handleType == rhs.handleType );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SemaphoreGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR>::value, "SemaphoreGetWin32HandleInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSemaphoreGetWin32HandleInfoKHR>
|
|
{
|
|
using Type = SemaphoreGetWin32HandleInfoKHR;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
|
|
|
|
#if defined( VK_USE_PLATFORM_FUCHSIA )
|
|
struct SemaphoreGetZirconHandleInfoFUCHSIA
|
|
{
|
|
using NativeType = VkSemaphoreGetZirconHandleInfoFUCHSIA;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
|
|
: semaphore( semaphore_ ), handleType( handleType_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SemaphoreGetZirconHandleInfoFUCHSIA( VkSemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SemaphoreGetZirconHandleInfoFUCHSIA( *reinterpret_cast<SemaphoreGetZirconHandleInfoFUCHSIA const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SemaphoreGetZirconHandleInfoFUCHSIA & operator=( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SemaphoreGetZirconHandleInfoFUCHSIA & operator=( VkSemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
semaphore = semaphore_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
handleType = handleType_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSemaphoreGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA*>( this );
|
|
}
|
|
|
|
explicit operator VkSemaphoreGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSemaphoreGetZirconHandleInfoFUCHSIA*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, semaphore, handleType );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SemaphoreGetZirconHandleInfoFUCHSIA const & ) const = default;
|
|
#else
|
|
bool operator==( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( semaphore == rhs.semaphore )
|
|
&& ( handleType == rhs.handleType );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
|
|
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA ) == sizeof( VkSemaphoreGetZirconHandleInfoFUCHSIA ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA>::value, "SemaphoreGetZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA>
|
|
{
|
|
using Type = SemaphoreGetZirconHandleInfoFUCHSIA;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_FUCHSIA*/
|
|
|
|
  struct SemaphoreSignalInfo
  {
    using NativeType = VkSemaphoreSignalInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSignalInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, uint64_t value_ = {}) VULKAN_HPP_NOEXCEPT
      : semaphore( semaphore_ ), value( value_ )
    {}

    VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SemaphoreSignalInfo( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SemaphoreSignalInfo( *reinterpret_cast<SemaphoreSignalInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    SemaphoreSignalInfo & operator=( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SemaphoreSignalInfo & operator=( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphore = semaphore_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT
    {
      value = value_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkSemaphoreSignalInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSemaphoreSignalInfo*>( this );
    }

    explicit operator VkSemaphoreSignalInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSemaphoreSignalInfo*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, uint64_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, semaphore, value );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( SemaphoreSignalInfo const & ) const = default;
#else
    bool operator==( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( semaphore == rhs.semaphore )
          && ( value == rhs.value );
#endif
    }

    bool operator!=( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSignalInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
    uint64_t value = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo ) == sizeof( VkSemaphoreSignalInfo ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo>::value, "SemaphoreSignalInfo is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eSemaphoreSignalInfo>
  {
    using Type = SemaphoreSignalInfo;
  };
  using SemaphoreSignalInfoKHR = SemaphoreSignalInfo;
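
  // Usage sketch (illustrative only): signaling a timeline semaphore from the host. Assumes a
  // Vulkan 1.2 vk::Device `device` with the timelineSemaphore feature enabled, and a timeline
  // semaphore `timeline` created as shown in the SemaphoreTypeCreateInfo sketch further below.
  //
  //   vk::SemaphoreSignalInfo signalInfo{ timeline, 42 };   // set the counter to 42
  //   device.signalSemaphore( signalInfo );
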
  struct SemaphoreSubmitInfo
  {
    using NativeType = VkSemaphoreSubmitInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSubmitInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfo(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, uint64_t value_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask_ = {}, uint32_t deviceIndex_ = {}) VULKAN_HPP_NOEXCEPT
      : semaphore( semaphore_ ), value( value_ ), stageMask( stageMask_ ), deviceIndex( deviceIndex_ )
    {}

    VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfo( SemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SemaphoreSubmitInfo( VkSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SemaphoreSubmitInfo( *reinterpret_cast<SemaphoreSubmitInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    SemaphoreSubmitInfo & operator=( SemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SemaphoreSubmitInfo & operator=( VkSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphore = semaphore_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT
    {
      value = value_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      stageMask = stageMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setDeviceIndex( uint32_t deviceIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceIndex = deviceIndex_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSemaphoreSubmitInfo*>( this );
    }

    explicit operator VkSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSemaphoreSubmitInfo*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, uint64_t const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, uint32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, semaphore, value, stageMask, deviceIndex );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( SemaphoreSubmitInfo const & ) const = default;
#else
    bool operator==( SemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( semaphore == rhs.semaphore )
          && ( value == rhs.value )
          && ( stageMask == rhs.stageMask )
          && ( deviceIndex == rhs.deviceIndex );
#endif
    }

    bool operator!=( SemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSubmitInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
    uint64_t value = {};
    VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask = {};
    uint32_t deviceIndex = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo ) == sizeof( VkSemaphoreSubmitInfo ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo>::value, "SemaphoreSubmitInfo is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eSemaphoreSubmitInfo>
  {
    using Type = SemaphoreSubmitInfo;
  };
  using SemaphoreSubmitInfoKHR = SemaphoreSubmitInfo;
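
  // Usage sketch (illustrative only): describing one wait and one signal operation for
  // vk::Queue::submit2 (Vulkan 1.3 / VK_KHR_synchronization2). Assumes `queue`, `cmdBuffer`,
  // `waitSemaphore` and `signalSemaphore` are valid handles; the value field is ignored for
  // binary semaphores.
  //
  //   vk::SemaphoreSubmitInfo waitInfo{ waitSemaphore, 0, vk::PipelineStageFlagBits2::eColorAttachmentOutput };
  //   vk::SemaphoreSubmitInfo signalInfo{ signalSemaphore, 0, vk::PipelineStageFlagBits2::eAllCommands };
  //   vk::CommandBufferSubmitInfo cmdInfo{ cmdBuffer };
  //   queue.submit2( vk::SubmitInfo2{}.setWaitSemaphoreInfos( waitInfo )
  //                                   .setCommandBufferInfos( cmdInfo )
  //                                   .setSignalSemaphoreInfos( signalInfo ) );
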
  struct SemaphoreTypeCreateInfo
  {
    using NativeType = VkSemaphoreTypeCreateInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreTypeCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo(VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary, uint64_t initialValue_ = {}) VULKAN_HPP_NOEXCEPT
      : semaphoreType( semaphoreType_ ), initialValue( initialValue_ )
    {}

    VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SemaphoreTypeCreateInfo( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SemaphoreTypeCreateInfo( *reinterpret_cast<SemaphoreTypeCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    SemaphoreTypeCreateInfo & operator=( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SemaphoreTypeCreateInfo & operator=( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setSemaphoreType( VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphoreType = semaphoreType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setInitialValue( uint64_t initialValue_ ) VULKAN_HPP_NOEXCEPT
    {
      initialValue = initialValue_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkSemaphoreTypeCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSemaphoreTypeCreateInfo*>( this );
    }

    explicit operator VkSemaphoreTypeCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSemaphoreTypeCreateInfo*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SemaphoreType const &, uint64_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, semaphoreType, initialValue );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( SemaphoreTypeCreateInfo const & ) const = default;
#else
    bool operator==( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( semaphoreType == rhs.semaphoreType )
          && ( initialValue == rhs.initialValue );
#endif
    }

    bool operator!=( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreTypeCreateInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary;
    uint64_t initialValue = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo ) == sizeof( VkSemaphoreTypeCreateInfo ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo>::value, "SemaphoreTypeCreateInfo is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eSemaphoreTypeCreateInfo>
  {
    using Type = SemaphoreTypeCreateInfo;
  };
  using SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo;
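
  // Usage sketch (illustrative only): creating a timeline semaphore by chaining
  // SemaphoreTypeCreateInfo into SemaphoreCreateInfo. Assumes a Vulkan 1.2 vk::Device `device`
  // with the timelineSemaphore feature enabled.
  //
  //   vk::StructureChain<vk::SemaphoreCreateInfo, vk::SemaphoreTypeCreateInfo> timelineChain{
  //     vk::SemaphoreCreateInfo{}, vk::SemaphoreTypeCreateInfo{ vk::SemaphoreType::eTimeline, 0 } };
  //   vk::Semaphore timeline = device.createSemaphore( timelineChain.get<vk::SemaphoreCreateInfo>() );
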
  struct SemaphoreWaitInfo
  {
    using NativeType = VkSemaphoreWaitInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreWaitInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo(VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ = {}, uint32_t semaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ = {}, const uint64_t * pValues_ = {}) VULKAN_HPP_NOEXCEPT
      : flags( flags_ ), semaphoreCount( semaphoreCount_ ), pSemaphores( pSemaphores_ ), pValues( pValues_ )
    {}

    VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SemaphoreWaitInfo( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SemaphoreWaitInfo( *reinterpret_cast<SemaphoreWaitInfo const *>( &rhs ) )
    {}

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & semaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & values_ = {} )
      : flags( flags_ ), semaphoreCount( static_cast<uint32_t>( semaphores_.size() ) ), pSemaphores( semaphores_.data() ), pValues( values_.data() )
    {
#ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( semaphores_.size() == values_.size() );
#else
      if ( semaphores_.size() != values_.size() )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::SemaphoreWaitInfo::SemaphoreWaitInfo: semaphores_.size() != values_.size()" );
      }
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    SemaphoreWaitInfo & operator=( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SemaphoreWaitInfo & operator=( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setSemaphoreCount( uint32_t semaphoreCount_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphoreCount = semaphoreCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ ) VULKAN_HPP_NOEXCEPT
    {
      pSemaphores = pSemaphores_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SemaphoreWaitInfo & setSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & semaphores_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphoreCount = static_cast<uint32_t>( semaphores_.size() );
      pSemaphores = semaphores_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPValues( const uint64_t * pValues_ ) VULKAN_HPP_NOEXCEPT
    {
      pValues = pValues_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SemaphoreWaitInfo & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & values_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphoreCount = static_cast<uint32_t>( values_.size() );
      pValues = values_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkSemaphoreWaitInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSemaphoreWaitInfo*>( this );
    }

    explicit operator VkSemaphoreWaitInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSemaphoreWaitInfo*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Semaphore * const &, const uint64_t * const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, semaphoreCount, pSemaphores, pValues );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( SemaphoreWaitInfo const & ) const = default;
#else
    bool operator==( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags )
          && ( semaphoreCount == rhs.semaphoreCount )
          && ( pSemaphores == rhs.pSemaphores )
          && ( pValues == rhs.pValues );
#endif
    }

    bool operator!=( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreWaitInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags = {};
    uint32_t semaphoreCount = {};
    const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores = {};
    const uint64_t * pValues = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo ) == sizeof( VkSemaphoreWaitInfo ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo>::value, "SemaphoreWaitInfo is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eSemaphoreWaitInfo>
  {
    using Type = SemaphoreWaitInfo;
  };
  using SemaphoreWaitInfoKHR = SemaphoreWaitInfo;
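
  // Usage sketch (illustrative only): blocking on the host until a timeline semaphore reaches a
  // given counter value, using the ArrayProxy constructor above. Assumes `device` and `timeline`
  // from the earlier sketches; the timeout is given in nanoseconds.
  //
  //   uint64_t waitValue = 42;
  //   vk::SemaphoreWaitInfo waitInfo{ vk::SemaphoreWaitFlags{}, timeline, waitValue };
  //   vk::Result result = device.waitSemaphores( waitInfo, UINT64_MAX );   // eSuccess or eTimeout
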
struct SetStateFlagsIndirectCommandNV
|
|
{
|
|
using NativeType = VkSetStateFlagsIndirectCommandNV;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV(uint32_t data_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: data( data_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SetStateFlagsIndirectCommandNV( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SetStateFlagsIndirectCommandNV( *reinterpret_cast<SetStateFlagsIndirectCommandNV const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SetStateFlagsIndirectCommandNV & operator=( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SetStateFlagsIndirectCommandNV & operator=( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SetStateFlagsIndirectCommandNV & setData( uint32_t data_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
data = data_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSetStateFlagsIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSetStateFlagsIndirectCommandNV*>( this );
|
|
}
|
|
|
|
explicit operator VkSetStateFlagsIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSetStateFlagsIndirectCommandNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( data );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SetStateFlagsIndirectCommandNV const & ) const = default;
|
|
#else
|
|
bool operator==( SetStateFlagsIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( data == rhs.data );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SetStateFlagsIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t data = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV ) == sizeof( VkSetStateFlagsIndirectCommandNV ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV>::value, "SetStateFlagsIndirectCommandNV is not nothrow_move_constructible!" );
|
|
|
|
struct ShaderModuleCreateInfo
|
|
{
|
|
using NativeType = VkShaderModuleCreateInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleCreateInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo(VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ = {}, size_t codeSize_ = {}, const uint32_t * pCode_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), codeSize( codeSize_ ), pCode( pCode_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ShaderModuleCreateInfo( *reinterpret_cast<ShaderModuleCreateInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & code_ )
|
|
: flags( flags_ ), codeSize( code_.size() * 4 ), pCode( code_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ShaderModuleCreateInfo & operator=( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ShaderModuleCreateInfo & operator=( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
codeSize = codeSize_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setPCode( const uint32_t * pCode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pCode = pCode_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ShaderModuleCreateInfo & setCode( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & code_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
codeSize = code_.size() * 4;
|
|
pCode = code_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkShaderModuleCreateInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkShaderModuleCreateInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkShaderModuleCreateInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkShaderModuleCreateInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags const &, size_t const &, const uint32_t * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, codeSize, pCode );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ShaderModuleCreateInfo const & ) const = default;
|
|
#else
|
|
bool operator==( ShaderModuleCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( codeSize == rhs.codeSize )
|
|
&& ( pCode == rhs.pCode );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ShaderModuleCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleCreateInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags = {};
|
|
size_t codeSize = {};
|
|
const uint32_t * pCode = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo>::value, "ShaderModuleCreateInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eShaderModuleCreateInfo>
|
|
{
|
|
using Type = ShaderModuleCreateInfo;
|
|
};
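// Illustrative usage sketch (added comment, not part of the generated API):
// Creating a shader module from a SPIR-V blob. Note that codeSize is expressed in bytes, which is
// why the ArrayProxy constructor and setCode() above multiply the number of 32-bit words by 4.
// A minimal sketch, assuming a valid vk::Device `device` and a std::vector<uint32_t> `spirv`
// holding the SPIR-V words (both hypothetical names):
//
//   vk::ShaderModuleCreateInfo createInfo( {}, spirv );   // codeSize becomes spirv.size() * 4
//   vk::ShaderModule shaderModule = device.createShaderModule( createInfo );
//
// With VULKAN_HPP_NO_EXCEPTIONS defined, createShaderModule instead returns a ResultValue that
// has to be checked explicitly.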
struct ShaderModuleValidationCacheCreateInfoEXT
|
|
{
|
|
using NativeType = VkShaderModuleValidationCacheCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleValidationCacheCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: validationCache( validationCache_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ShaderModuleValidationCacheCreateInfoEXT( *reinterpret_cast<ShaderModuleValidationCacheCreateInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ShaderModuleValidationCacheCreateInfoEXT & operator=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ShaderModuleValidationCacheCreateInfoEXT & operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ShaderModuleValidationCacheCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ShaderModuleValidationCacheCreateInfoEXT & setValidationCache( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
validationCache = validationCache_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkShaderModuleValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkShaderModuleValidationCacheCreateInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkShaderModuleValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkShaderModuleValidationCacheCreateInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ValidationCacheEXT const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, validationCache );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ShaderModuleValidationCacheCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( validationCache == rhs.validationCache );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleValidationCacheCreateInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT ) == sizeof( VkShaderModuleValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT>::value, "ShaderModuleValidationCacheCreateInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eShaderModuleValidationCacheCreateInfoEXT>
|
|
{
|
|
using Type = ShaderModuleValidationCacheCreateInfoEXT;
|
|
};
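// Illustrative usage sketch (added comment, not part of the generated API):
// ShaderModuleValidationCacheCreateInfoEXT is chained into ShaderModuleCreateInfo::pNext so that
// shader validation results can be reused from a VK_EXT_validation_cache object. A minimal sketch,
// assuming a previously created vk::ValidationCacheEXT `validationCache` and the `spirv` vector
// from the previous sketch (hypothetical names):
//
//   vk::ShaderModuleValidationCacheCreateInfoEXT cacheInfo( validationCache );
//   vk::ShaderModuleCreateInfo createInfo( {}, spirv );
//   createInfo.setPNext( &cacheInfo );
//   vk::ShaderModule shaderModule = device.createShaderModule( createInfo );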
struct ShaderResourceUsageAMD
|
|
{
|
|
using NativeType = VkShaderResourceUsageAMD;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD(uint32_t numUsedVgprs_ = {}, uint32_t numUsedSgprs_ = {}, uint32_t ldsSizePerLocalWorkGroup_ = {}, size_t ldsUsageSizeInBytes_ = {}, size_t scratchMemUsageInBytes_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: numUsedVgprs( numUsedVgprs_ ), numUsedSgprs( numUsedSgprs_ ), ldsSizePerLocalWorkGroup( ldsSizePerLocalWorkGroup_ ), ldsUsageSizeInBytes( ldsUsageSizeInBytes_ ), scratchMemUsageInBytes( scratchMemUsageInBytes_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ShaderResourceUsageAMD( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ShaderResourceUsageAMD( *reinterpret_cast<ShaderResourceUsageAMD const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ShaderResourceUsageAMD & operator=( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ShaderResourceUsageAMD & operator=( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkShaderResourceUsageAMD const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkShaderResourceUsageAMD*>( this );
|
|
}
|
|
|
|
explicit operator VkShaderResourceUsageAMD &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkShaderResourceUsageAMD*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, size_t const &, size_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( numUsedVgprs, numUsedSgprs, ldsSizePerLocalWorkGroup, ldsUsageSizeInBytes, scratchMemUsageInBytes );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ShaderResourceUsageAMD const & ) const = default;
|
|
#else
|
|
bool operator==( ShaderResourceUsageAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( numUsedVgprs == rhs.numUsedVgprs )
|
|
&& ( numUsedSgprs == rhs.numUsedSgprs )
|
|
&& ( ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup )
|
|
&& ( ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes )
|
|
&& ( scratchMemUsageInBytes == rhs.scratchMemUsageInBytes );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ShaderResourceUsageAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t numUsedVgprs = {};
|
|
uint32_t numUsedSgprs = {};
|
|
uint32_t ldsSizePerLocalWorkGroup = {};
|
|
size_t ldsUsageSizeInBytes = {};
|
|
size_t scratchMemUsageInBytes = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD ) == sizeof( VkShaderResourceUsageAMD ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD>::value, "ShaderResourceUsageAMD is not nothrow_move_constructible!" );
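// Illustrative usage note (added comment, not part of the generated API):
// ShaderResourceUsageAMD is not queried on its own; it is returned as the resourceUsage member of
// ShaderStatisticsInfoAMD by the VK_AMD_shader_info extension (see the sketch after that struct
// below), e.g.
//
//   uint32_t usedVgprs = stats.resourceUsage.numUsedVgprs;   // `stats` as obtained there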
struct ShaderStatisticsInfoAMD
|
|
{
|
|
using NativeType = VkShaderStatisticsInfoAMD;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD(VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask_ = {}, VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage_ = {}, uint32_t numPhysicalVgprs_ = {}, uint32_t numPhysicalSgprs_ = {}, uint32_t numAvailableVgprs_ = {}, uint32_t numAvailableSgprs_ = {}, std::array<uint32_t,3> const & computeWorkGroupSize_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: shaderStageMask( shaderStageMask_ ), resourceUsage( resourceUsage_ ), numPhysicalVgprs( numPhysicalVgprs_ ), numPhysicalSgprs( numPhysicalSgprs_ ), numAvailableVgprs( numAvailableVgprs_ ), numAvailableSgprs( numAvailableSgprs_ ), computeWorkGroupSize( computeWorkGroupSize_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ShaderStatisticsInfoAMD( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ShaderStatisticsInfoAMD( *reinterpret_cast<ShaderStatisticsInfoAMD const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ShaderStatisticsInfoAMD & operator=( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ShaderStatisticsInfoAMD & operator=( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkShaderStatisticsInfoAMD const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkShaderStatisticsInfoAMD*>( this );
|
|
}
|
|
|
|
explicit operator VkShaderStatisticsInfoAMD &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkShaderStatisticsInfoAMD*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( shaderStageMask, resourceUsage, numPhysicalVgprs, numPhysicalSgprs, numAvailableVgprs, numAvailableSgprs, computeWorkGroupSize );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ShaderStatisticsInfoAMD const & ) const = default;
|
|
#else
|
|
bool operator==( ShaderStatisticsInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( shaderStageMask == rhs.shaderStageMask )
|
|
&& ( resourceUsage == rhs.resourceUsage )
|
|
&& ( numPhysicalVgprs == rhs.numPhysicalVgprs )
|
|
&& ( numPhysicalSgprs == rhs.numPhysicalSgprs )
|
|
&& ( numAvailableVgprs == rhs.numAvailableVgprs )
|
|
&& ( numAvailableSgprs == rhs.numAvailableSgprs )
|
|
&& ( computeWorkGroupSize == rhs.computeWorkGroupSize );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ShaderStatisticsInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask = {};
|
|
VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage = {};
|
|
uint32_t numPhysicalVgprs = {};
|
|
uint32_t numPhysicalSgprs = {};
|
|
uint32_t numAvailableVgprs = {};
|
|
uint32_t numAvailableSgprs = {};
|
|
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> computeWorkGroupSize = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD ) == sizeof( VkShaderStatisticsInfoAMD ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD>::value, "ShaderStatisticsInfoAMD is not nothrow_move_constructible!" );
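// Illustrative usage sketch (added comment, not part of the generated API):
// ShaderStatisticsInfoAMD is returned by vkGetShaderInfoAMD (VK_AMD_shader_info) as an opaque byte
// blob. A minimal sketch, assuming the extension is enabled, a vk::Pipeline `pipeline`
// (hypothetical name), and the enhanced-mode Device::getShaderInfoAMD overload that returns a
// std::vector<uint8_t>:
//
//   std::vector<uint8_t> blob =
//       device.getShaderInfoAMD( pipeline, vk::ShaderStageFlagBits::eVertex,
//                                vk::ShaderInfoTypeAMD::eStatistics );
//   vk::ShaderStatisticsInfoAMD stats;
//   assert( blob.size() >= sizeof( stats ) );
//   std::memcpy( &stats, blob.data(), sizeof( stats ) );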
struct SharedPresentSurfaceCapabilitiesKHR
|
|
{
|
|
using NativeType = VkSharedPresentSurfaceCapabilitiesKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSharedPresentSurfaceCapabilitiesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR(VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: sharedPresentSupportedUsageFlags( sharedPresentSupportedUsageFlags_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SharedPresentSurfaceCapabilitiesKHR( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SharedPresentSurfaceCapabilitiesKHR( *reinterpret_cast<SharedPresentSurfaceCapabilitiesKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SharedPresentSurfaceCapabilitiesKHR & operator=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SharedPresentSurfaceCapabilitiesKHR & operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkSharedPresentSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSharedPresentSurfaceCapabilitiesKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkSharedPresentSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSharedPresentSurfaceCapabilitiesKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, sharedPresentSupportedUsageFlags );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SharedPresentSurfaceCapabilitiesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR>::value, "SharedPresentSurfaceCapabilitiesKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSharedPresentSurfaceCapabilitiesKHR>
|
|
{
|
|
using Type = SharedPresentSurfaceCapabilitiesKHR;
|
|
};
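// Illustrative usage sketch (added comment, not part of the generated API):
// SharedPresentSurfaceCapabilitiesKHR is filled in by vkGetPhysicalDeviceSurfaceCapabilities2KHR
// when chained behind SurfaceCapabilities2KHR. A minimal sketch, assuming
// VK_KHR_shared_presentable_image support, a vk::SurfaceKHR `surface` (hypothetical name), and the
// StructureChain-returning overload of PhysicalDevice::getSurfaceCapabilities2KHR:
//
//   vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo( surface );
//   auto chain = physicalDevice.getSurfaceCapabilities2KHR<
//       vk::SurfaceCapabilities2KHR, vk::SharedPresentSurfaceCapabilitiesKHR >( surfaceInfo );
//   vk::ImageUsageFlags sharedUsage =
//       chain.get<vk::SharedPresentSurfaceCapabilitiesKHR>().sharedPresentSupportedUsageFlags;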
struct SparseImageFormatProperties
|
|
{
|
|
using NativeType = VkSparseImageFormatProperties;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SparseImageFormatProperties(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageGranularity_ = {}, VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: aspectMask( aspectMask_ ), imageGranularity( imageGranularity_ ), flags( flags_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SparseImageFormatProperties( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SparseImageFormatProperties( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SparseImageFormatProperties( *reinterpret_cast<SparseImageFormatProperties const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SparseImageFormatProperties & operator=( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SparseImageFormatProperties & operator=( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkSparseImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSparseImageFormatProperties*>( this );
|
|
}
|
|
|
|
explicit operator VkSparseImageFormatProperties &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSparseImageFormatProperties*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, VULKAN_HPP_NAMESPACE::Extent3D const &, VULKAN_HPP_NAMESPACE::SparseImageFormatFlags const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( aspectMask, imageGranularity, flags );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SparseImageFormatProperties const & ) const = default;
|
|
#else
|
|
bool operator==( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( aspectMask == rhs.aspectMask )
|
|
&& ( imageGranularity == rhs.imageGranularity )
|
|
&& ( flags == rhs.flags );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
|
|
VULKAN_HPP_NAMESPACE::Extent3D imageGranularity = {};
|
|
VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>::value, "SparseImageFormatProperties is not nothrow_move_constructible!" );
|
|
|
|
struct SparseImageFormatProperties2
|
|
{
|
|
using NativeType = VkSparseImageFormatProperties2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageFormatProperties2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2(VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: properties( properties_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SparseImageFormatProperties2( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SparseImageFormatProperties2( *reinterpret_cast<SparseImageFormatProperties2 const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SparseImageFormatProperties2 & operator=( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SparseImageFormatProperties2 & operator=( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkSparseImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSparseImageFormatProperties2*>( this );
|
|
}
|
|
|
|
explicit operator VkSparseImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSparseImageFormatProperties2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, properties );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SparseImageFormatProperties2 const & ) const = default;
|
|
#else
|
|
bool operator==( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( properties == rhs.properties );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageFormatProperties2;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value, "SparseImageFormatProperties2 is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSparseImageFormatProperties2>
|
|
{
|
|
using Type = SparseImageFormatProperties2;
|
|
};
|
|
using SparseImageFormatProperties2KHR = SparseImageFormatProperties2;
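// Illustrative usage sketch (added comment, not part of the generated API):
// SparseImageFormatProperties2 entries are enumerated per format/type/usage combination. A minimal
// sketch, assuming a vk::PhysicalDevice `physicalDevice` (hypothetical name) and the enhanced-mode
// overload of getSparseImageFormatProperties2 that returns a std::vector:
//
//   vk::PhysicalDeviceSparseImageFormatInfo2 formatInfo(
//       vk::Format::eR8G8B8A8Unorm, vk::ImageType::e2D, vk::SampleCountFlagBits::e1,
//       vk::ImageUsageFlagBits::eSampled, vk::ImageTiling::eOptimal );
//   std::vector<vk::SparseImageFormatProperties2> properties =
//       physicalDevice.getSparseImageFormatProperties2( formatInfo );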
struct SparseImageMemoryRequirements
|
|
{
|
|
using NativeType = VkSparseImageMemoryRequirements;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements(VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties_ = {}, uint32_t imageMipTailFirstLod_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: formatProperties( formatProperties_ ), imageMipTailFirstLod( imageMipTailFirstLod_ ), imageMipTailSize( imageMipTailSize_ ), imageMipTailOffset( imageMipTailOffset_ ), imageMipTailStride( imageMipTailStride_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SparseImageMemoryRequirements( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SparseImageMemoryRequirements( *reinterpret_cast<SparseImageMemoryRequirements const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SparseImageMemoryRequirements & operator=( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SparseImageMemoryRequirements & operator=( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkSparseImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSparseImageMemoryRequirements*>( this );
|
|
}
|
|
|
|
explicit operator VkSparseImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSparseImageMemoryRequirements*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( formatProperties, imageMipTailFirstLod, imageMipTailSize, imageMipTailOffset, imageMipTailStride );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SparseImageMemoryRequirements const & ) const = default;
|
|
#else
|
|
bool operator==( SparseImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( formatProperties == rhs.formatProperties )
|
|
&& ( imageMipTailFirstLod == rhs.imageMipTailFirstLod )
|
|
&& ( imageMipTailSize == rhs.imageMipTailSize )
|
|
&& ( imageMipTailOffset == rhs.imageMipTailOffset )
|
|
&& ( imageMipTailStride == rhs.imageMipTailStride );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SparseImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties = {};
|
|
uint32_t imageMipTailFirstLod = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>::value, "SparseImageMemoryRequirements is not nothrow_move_constructible!" );
|
|
|
|
struct SparseImageMemoryRequirements2
|
|
{
|
|
using NativeType = VkSparseImageMemoryRequirements2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageMemoryRequirements2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2(VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: memoryRequirements( memoryRequirements_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SparseImageMemoryRequirements2( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SparseImageMemoryRequirements2( *reinterpret_cast<SparseImageMemoryRequirements2 const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SparseImageMemoryRequirements2 & operator=( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SparseImageMemoryRequirements2 & operator=( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkSparseImageMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSparseImageMemoryRequirements2*>( this );
|
|
}
|
|
|
|
explicit operator VkSparseImageMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSparseImageMemoryRequirements2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, memoryRequirements );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SparseImageMemoryRequirements2 const & ) const = default;
|
|
#else
|
|
bool operator==( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( memoryRequirements == rhs.memoryRequirements );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageMemoryRequirements2;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, "SparseImageMemoryRequirements2 is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSparseImageMemoryRequirements2>
|
|
{
|
|
using Type = SparseImageMemoryRequirements2;
|
|
};
|
|
using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2;
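// Illustrative usage sketch (added comment, not part of the generated API):
// SparseImageMemoryRequirements2 describes the per-aspect sparse binding requirements of an image,
// including its mip tail. A minimal sketch, assuming a sparse vk::Image `sparseImage` (hypothetical
// name) and the enhanced-mode overload of getImageSparseMemoryRequirements2 that returns a
// std::vector:
//
//   vk::ImageSparseMemoryRequirementsInfo2 reqInfo( sparseImage );
//   std::vector<vk::SparseImageMemoryRequirements2> requirements =
//       device.getImageSparseMemoryRequirements2( reqInfo );
//   // inspect requirements[i].memoryRequirements.imageMipTailSize / imageMipTailOffset / ...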
#if defined( VK_USE_PLATFORM_GGP )
|
|
struct StreamDescriptorSurfaceCreateInfoGGP
|
|
{
|
|
using NativeType = VkStreamDescriptorSurfaceCreateInfoGGP;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP(VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ = {}, GgpStreamDescriptor streamDescriptor_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), streamDescriptor( streamDescriptor_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
StreamDescriptorSurfaceCreateInfoGGP( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: StreamDescriptorSurfaceCreateInfoGGP( *reinterpret_cast<StreamDescriptorSurfaceCreateInfoGGP const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
StreamDescriptorSurfaceCreateInfoGGP & operator=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
StreamDescriptorSurfaceCreateInfoGGP & operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & setFlags( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & setStreamDescriptor( GgpStreamDescriptor streamDescriptor_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
streamDescriptor = streamDescriptor_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkStreamDescriptorSurfaceCreateInfoGGP const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP*>( this );
|
|
}
|
|
|
|
explicit operator VkStreamDescriptorSurfaceCreateInfoGGP &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkStreamDescriptorSurfaceCreateInfoGGP*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP const &, GgpStreamDescriptor const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, streamDescriptor );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
std::strong_ordering operator<=>( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
|
|
if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
|
|
if ( auto cmp = memcmp( &streamDescriptor, &rhs.streamDescriptor, sizeof( GgpStreamDescriptor ) ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( memcmp( &streamDescriptor, &rhs.streamDescriptor, sizeof( GgpStreamDescriptor ) ) == 0 );
|
|
}
|
|
|
|
bool operator!=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags = {};
|
|
GgpStreamDescriptor streamDescriptor = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP ) == sizeof( VkStreamDescriptorSurfaceCreateInfoGGP ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP>::value, "StreamDescriptorSurfaceCreateInfoGGP is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eStreamDescriptorSurfaceCreateInfoGGP>
|
|
{
|
|
using Type = StreamDescriptorSurfaceCreateInfoGGP;
|
|
};
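// Illustrative usage sketch (added comment, not part of the generated API):
// On the Google Games Platform (GGP), a presentation surface is created from a GgpStreamDescriptor.
// A minimal sketch, assuming VK_GGP_stream_descriptor_surface is enabled, hypothetical variables
// `instance` and `streamDescriptor`, and that this header's
// Instance::createStreamDescriptorSurfaceGGP wrapper is available:
//
//   vk::StreamDescriptorSurfaceCreateInfoGGP createInfo( {}, streamDescriptor );
//   vk::SurfaceKHR surface = instance.createStreamDescriptorSurfaceGGP( createInfo );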
#endif /*VK_USE_PLATFORM_GGP*/
|
|
|
|
struct StridedDeviceAddressRegionKHR
|
|
{
|
|
using NativeType = VkStridedDeviceAddressRegionKHR;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: deviceAddress( deviceAddress_ ), stride( stride_ ), size( size_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
StridedDeviceAddressRegionKHR( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: StridedDeviceAddressRegionKHR( *reinterpret_cast<StridedDeviceAddressRegionKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
StridedDeviceAddressRegionKHR & operator=( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
StridedDeviceAddressRegionKHR & operator=( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
deviceAddress = deviceAddress_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stride = stride_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
size = size_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkStridedDeviceAddressRegionKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkStridedDeviceAddressRegionKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkStridedDeviceAddressRegionKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkStridedDeviceAddressRegionKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( deviceAddress, stride, size );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( StridedDeviceAddressRegionKHR const & ) const = default;
|
|
#else
|
|
bool operator==( StridedDeviceAddressRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( deviceAddress == rhs.deviceAddress )
|
|
&& ( stride == rhs.stride )
|
|
&& ( size == rhs.size );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( StridedDeviceAddressRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize stride = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR ) == sizeof( VkStridedDeviceAddressRegionKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR>::value, "StridedDeviceAddressRegionKHR is not nothrow_move_constructible!" );
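// Illustrative usage sketch (added comment, not part of the generated API):
// StridedDeviceAddressRegionKHR describes one shader-binding-table region for a ray tracing
// dispatch. A minimal sketch, assuming hypothetical values `sbtAddress` (the SBT buffer's device
// address), `handleSizeAligned`, `missOffset`/`missSize` and `hitOffset`/`hitSize` that already
// satisfy the alignment rules of VK_KHR_ray_tracing_pipeline:
//
//   vk::StridedDeviceAddressRegionKHR raygenRegion( sbtAddress, handleSizeAligned, handleSizeAligned );
//   vk::StridedDeviceAddressRegionKHR missRegion( sbtAddress + missOffset, handleSizeAligned, missSize );
//   vk::StridedDeviceAddressRegionKHR hitRegion( sbtAddress + hitOffset, handleSizeAligned, hitSize );
//   vk::StridedDeviceAddressRegionKHR callableRegion{};   // an all-zero region means "no table"
//   commandBuffer.traceRaysKHR( raygenRegion, missRegion, hitRegion, callableRegion, width, height, 1 );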
struct SubmitInfo
|
|
{
|
|
using NativeType = VkSubmitInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SubmitInfo(uint32_t waitSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ = {}, uint32_t commandBufferCount_ = {}, const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ = {}, uint32_t signalSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: waitSemaphoreCount( waitSemaphoreCount_ ), pWaitSemaphores( pWaitSemaphores_ ), pWaitDstStageMask( pWaitDstStageMask_ ), commandBufferCount( commandBufferCount_ ), pCommandBuffers( pCommandBuffers_ ), signalSemaphoreCount( signalSemaphoreCount_ ), pSignalSemaphores( pSignalSemaphores_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SubmitInfo( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubmitInfo( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SubmitInfo( *reinterpret_cast<SubmitInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineStageFlags> const & waitDstStageMask_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ = {} )
|
|
: waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) ), pWaitSemaphores( waitSemaphores_.data() ), pWaitDstStageMask( waitDstStageMask_.data() ), commandBufferCount( static_cast<uint32_t>( commandBuffers_.size() ) ), pCommandBuffers( commandBuffers_.data() ), signalSemaphoreCount( static_cast<uint32_t>( signalSemaphores_.size() ) ), pSignalSemaphores( signalSemaphores_.data() )
|
|
{
|
|
#ifdef VULKAN_HPP_NO_EXCEPTIONS
|
|
VULKAN_HPP_ASSERT( waitSemaphores_.size() == waitDstStageMask_.size() );
|
|
#else
|
|
if ( waitSemaphores_.size() != waitDstStageMask_.size() )
|
|
{
|
|
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::SubmitInfo::SubmitInfo: waitSemaphores_.size() != waitDstStageMask_.size()" );
|
|
}
|
|
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SubmitInfo & operator=( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubmitInfo & operator=( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubmitInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
waitSemaphoreCount = waitSemaphoreCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pWaitSemaphores = pWaitSemaphores_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubmitInfo & setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
|
|
pWaitSemaphores = waitSemaphores_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPWaitDstStageMask( const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pWaitDstStageMask = pWaitDstStageMask_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubmitInfo & setWaitDstStageMask( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineStageFlags> const & waitDstStageMask_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
waitSemaphoreCount = static_cast<uint32_t>( waitDstStageMask_.size() );
|
|
pWaitDstStageMask = waitDstStageMask_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
commandBufferCount = commandBufferCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pCommandBuffers = pCommandBuffers_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubmitInfo & setCommandBuffers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
commandBufferCount = static_cast<uint32_t>( commandBuffers_.size() );
|
|
pCommandBuffers = commandBuffers_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
signalSemaphoreCount = signalSemaphoreCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSignalSemaphores = pSignalSemaphores_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubmitInfo & setSignalSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
signalSemaphoreCount = static_cast<uint32_t>( signalSemaphores_.size() );
|
|
pSignalSemaphores = signalSemaphores_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSubmitInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkSubmitInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSubmitInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Semaphore * const &, const VULKAN_HPP_NAMESPACE::PipelineStageFlags * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::CommandBuffer * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Semaphore * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, waitSemaphoreCount, pWaitSemaphores, pWaitDstStageMask, commandBufferCount, pCommandBuffers, signalSemaphoreCount, pSignalSemaphores );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SubmitInfo const & ) const = default;
|
|
#else
|
|
bool operator==( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( waitSemaphoreCount == rhs.waitSemaphoreCount )
|
|
&& ( pWaitSemaphores == rhs.pWaitSemaphores )
|
|
&& ( pWaitDstStageMask == rhs.pWaitDstStageMask )
|
|
&& ( commandBufferCount == rhs.commandBufferCount )
|
|
&& ( pCommandBuffers == rhs.pCommandBuffers )
|
|
&& ( signalSemaphoreCount == rhs.signalSemaphoreCount )
|
|
&& ( pSignalSemaphores == rhs.pSignalSemaphores );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo;
|
|
const void * pNext = {};
|
|
uint32_t waitSemaphoreCount = {};
|
|
const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {};
|
|
const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask = {};
|
|
uint32_t commandBufferCount = {};
|
|
const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers = {};
|
|
uint32_t signalSemaphoreCount = {};
|
|
const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubmitInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubmitInfo>::value, "SubmitInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSubmitInfo>
|
|
{
|
|
using Type = SubmitInfo;
|
|
};
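// Illustrative usage sketch (added comment, not part of the generated API):
// A typical graphics submission that waits on an "image available" semaphore, executes one command
// buffer and signals a "render finished" semaphore. The enhanced-mode constructor used here derives
// the count fields from the proxies and, as the constructor above shows, requires the number of
// wait semaphores and wait stage masks to match. Variable names are hypothetical:
//
//   vk::PipelineStageFlags waitStage = vk::PipelineStageFlagBits::eColorAttachmentOutput;
//   vk::SubmitInfo submitInfo( imageAvailableSemaphore,    // waitSemaphores
//                              waitStage,                  // waitDstStageMask (one per wait semaphore)
//                              commandBuffer,              // commandBuffers
//                              renderFinishedSemaphore );  // signalSemaphores
//   graphicsQueue.submit( submitInfo, inFlightFence );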
struct SubmitInfo2
|
|
{
|
|
using NativeType = VkSubmitInfo2;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo2;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SubmitInfo2(VULKAN_HPP_NAMESPACE::SubmitFlags flags_ = {}, uint32_t waitSemaphoreInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos_ = {}, uint32_t commandBufferInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos_ = {}, uint32_t signalSemaphoreInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), waitSemaphoreInfoCount( waitSemaphoreInfoCount_ ), pWaitSemaphoreInfos( pWaitSemaphoreInfos_ ), commandBufferInfoCount( commandBufferInfoCount_ ), pCommandBufferInfos( pCommandBufferInfos_ ), signalSemaphoreInfoCount( signalSemaphoreInfoCount_ ), pSignalSemaphoreInfos( pSignalSemaphoreInfos_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SubmitInfo2( SubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubmitInfo2( VkSubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SubmitInfo2( *reinterpret_cast<SubmitInfo2 const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubmitInfo2( VULKAN_HPP_NAMESPACE::SubmitFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const & waitSemaphoreInfos_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo> const & commandBufferInfos_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const & signalSemaphoreInfos_ = {} )
|
|
: flags( flags_ ), waitSemaphoreInfoCount( static_cast<uint32_t>( waitSemaphoreInfos_.size() ) ), pWaitSemaphoreInfos( waitSemaphoreInfos_.data() ), commandBufferInfoCount( static_cast<uint32_t>( commandBufferInfos_.size() ) ), pCommandBufferInfos( commandBufferInfos_.data() ), signalSemaphoreInfoCount( static_cast<uint32_t>( signalSemaphoreInfos_.size() ) ), pSignalSemaphoreInfos( signalSemaphoreInfos_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SubmitInfo2 & operator=( SubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubmitInfo2 & operator=( VkSubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubmitInfo2 const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setFlags( VULKAN_HPP_NAMESPACE::SubmitFlags flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setWaitSemaphoreInfoCount( uint32_t waitSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
waitSemaphoreInfoCount = waitSemaphoreInfoCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPWaitSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pWaitSemaphoreInfos = pWaitSemaphoreInfos_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubmitInfo2 & setWaitSemaphoreInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const & waitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
waitSemaphoreInfoCount = static_cast<uint32_t>( waitSemaphoreInfos_.size() );
|
|
pWaitSemaphoreInfos = waitSemaphoreInfos_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setCommandBufferInfoCount( uint32_t commandBufferInfoCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
commandBufferInfoCount = commandBufferInfoCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPCommandBufferInfos( const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pCommandBufferInfos = pCommandBufferInfos_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubmitInfo2 & setCommandBufferInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo> const & commandBufferInfos_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
commandBufferInfoCount = static_cast<uint32_t>( commandBufferInfos_.size() );
|
|
pCommandBufferInfos = commandBufferInfos_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setSignalSemaphoreInfoCount( uint32_t signalSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
signalSemaphoreInfoCount = signalSemaphoreInfoCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPSignalSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSignalSemaphoreInfos = pSignalSemaphoreInfos_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubmitInfo2 & setSignalSemaphoreInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const & signalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
signalSemaphoreInfoCount = static_cast<uint32_t>( signalSemaphoreInfos_.size() );
|
|
pSignalSemaphoreInfos = signalSemaphoreInfos_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSubmitInfo2 const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSubmitInfo2*>( this );
|
|
}
|
|
|
|
explicit operator VkSubmitInfo2 &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSubmitInfo2*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SubmitFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, waitSemaphoreInfoCount, pWaitSemaphoreInfos, commandBufferInfoCount, pCommandBufferInfos, signalSemaphoreInfoCount, pSignalSemaphoreInfos );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SubmitInfo2 const & ) const = default;
|
|
#else
|
|
bool operator==( SubmitInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( waitSemaphoreInfoCount == rhs.waitSemaphoreInfoCount )
|
|
&& ( pWaitSemaphoreInfos == rhs.pWaitSemaphoreInfos )
|
|
&& ( commandBufferInfoCount == rhs.commandBufferInfoCount )
|
|
&& ( pCommandBufferInfos == rhs.pCommandBufferInfos )
|
|
&& ( signalSemaphoreInfoCount == rhs.signalSemaphoreInfoCount )
|
|
&& ( pSignalSemaphoreInfos == rhs.pSignalSemaphoreInfos );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SubmitInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo2;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SubmitFlags flags = {};
|
|
uint32_t waitSemaphoreInfoCount = {};
|
|
const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos = {};
|
|
uint32_t commandBufferInfoCount = {};
|
|
const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos = {};
|
|
uint32_t signalSemaphoreInfoCount = {};
|
|
const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos = {};
|
|
|
|
};
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubmitInfo2 ) == sizeof( VkSubmitInfo2 ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubmitInfo2>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubmitInfo2>::value, "SubmitInfo2 is not nothrow_move_constructible!" );

template <>
struct CppType<StructureType, StructureType::eSubmitInfo2>
{
using Type = SubmitInfo2;
};
using SubmitInfo2KHR = SubmitInfo2;
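
// Usage sketch (illustrative comment, not generated from the registry): filling a
// SubmitInfo2 with the ArrayProxy setters above and handing it to Queue::submit2.
// The variables queue, fence, waitInfos, cmdInfos and signalInfos are assumptions
// made for this example, and the enhanced-mode setters must be enabled.
//
//   std::vector<VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo>     waitInfos;    // filled by the caller
//   std::vector<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo> cmdInfos;     // filled by the caller
//   std::vector<VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo>     signalInfos;  // filled by the caller
//
//   VULKAN_HPP_NAMESPACE::SubmitInfo2 submitInfo;
//   submitInfo.setWaitSemaphoreInfos( waitInfos )      // also sets waitSemaphoreInfoCount
//             .setCommandBufferInfos( cmdInfos )       // also sets commandBufferInfoCount
//             .setSignalSemaphoreInfos( signalInfos ); // also sets signalSemaphoreInfoCount
//   queue.submit2( submitInfo, fence );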
|
|
|
|
struct SubpassBeginInfo
|
|
{
|
|
using NativeType = VkSubpassBeginInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassBeginInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SubpassBeginInfo(VULKAN_HPP_NAMESPACE::SubpassContents contents_ = VULKAN_HPP_NAMESPACE::SubpassContents::eInline) VULKAN_HPP_NOEXCEPT
|
|
: contents( contents_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SubpassBeginInfo( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassBeginInfo( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SubpassBeginInfo( *reinterpret_cast<SubpassBeginInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SubpassBeginInfo & operator=( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassBeginInfo & operator=( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassBeginInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
contents = contents_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSubpassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSubpassBeginInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSubpassBeginInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SubpassContents const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, contents );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SubpassBeginInfo const & ) const = default;
|
|
#else
|
|
bool operator==( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( contents == rhs.contents );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassBeginInfo;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline;
|
|
|
|
};
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassBeginInfo ) == sizeof( VkSubpassBeginInfo ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassBeginInfo>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassBeginInfo>::value, "SubpassBeginInfo is not nothrow_move_constructible!" );

template <>
struct CppType<StructureType, StructureType::eSubpassBeginInfo>
{
using Type = SubpassBeginInfo;
};
using SubpassBeginInfoKHR = SubpassBeginInfo;
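
// Usage sketch (illustrative comment, not generated from the registry): SubpassBeginInfo
// accompanies a RenderPassBeginInfo when recording with the *2 render pass commands.
// The variables cmd and renderPassBeginInfo are assumptions made for this example.
//
//   VULKAN_HPP_NAMESPACE::SubpassBeginInfo subpassBeginInfo( VULKAN_HPP_NAMESPACE::SubpassContents::eInline );
//   cmd.beginRenderPass2( renderPassBeginInfo, subpassBeginInfo );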
|
|
|
|
struct SubpassDescriptionDepthStencilResolve
|
|
{
|
|
using NativeType = VkSubpassDescriptionDepthStencilResolve;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescriptionDepthStencilResolve;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve(VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: depthResolveMode( depthResolveMode_ ), stencilResolveMode( stencilResolveMode_ ), pDepthStencilResolveAttachment( pDepthStencilResolveAttachment_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassDescriptionDepthStencilResolve( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SubpassDescriptionDepthStencilResolve( *reinterpret_cast<SubpassDescriptionDepthStencilResolve const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SubpassDescriptionDepthStencilResolve & operator=( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassDescriptionDepthStencilResolve & operator=( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & setDepthResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
depthResolveMode = depthResolveMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & setStencilResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stencilResolveMode = stencilResolveMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & setPDepthStencilResolveAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDepthStencilResolveAttachment = pDepthStencilResolveAttachment_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSubpassDescriptionDepthStencilResolve const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSubpassDescriptionDepthStencilResolve*>( this );
|
|
}
|
|
|
|
explicit operator VkSubpassDescriptionDepthStencilResolve &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSubpassDescriptionDepthStencilResolve*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ResolveModeFlagBits const &, VULKAN_HPP_NAMESPACE::ResolveModeFlagBits const &, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, depthResolveMode, stencilResolveMode, pDepthStencilResolveAttachment );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SubpassDescriptionDepthStencilResolve const & ) const = default;
|
|
#else
|
|
bool operator==( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( depthResolveMode == rhs.depthResolveMode )
|
|
&& ( stencilResolveMode == rhs.stencilResolveMode )
|
|
&& ( pDepthStencilResolveAttachment == rhs.pDepthStencilResolveAttachment );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescriptionDepthStencilResolve;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
|
|
VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
|
|
const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment = {};
|
|
|
|
};
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve ) == sizeof( VkSubpassDescriptionDepthStencilResolve ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve>::value, "SubpassDescriptionDepthStencilResolve is not nothrow_move_constructible!" );

template <>
struct CppType<StructureType, StructureType::eSubpassDescriptionDepthStencilResolve>
{
using Type = SubpassDescriptionDepthStencilResolve;
};
using SubpassDescriptionDepthStencilResolveKHR = SubpassDescriptionDepthStencilResolve;
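
// Usage sketch (illustrative comment, not generated from the registry): this struct extends
// SubpassDescription2 through its pNext chain to request a depth/stencil resolve. The
// AttachmentReference2 named depthStencilResolveRef and the chosen resolve modes are
// assumptions made for this example.
//
//   VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve resolveInfo(
//     VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eSampleZero,    // depthResolveMode
//     VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eSampleZero,    // stencilResolveMode
//     &depthStencilResolveRef );
//   VULKAN_HPP_NAMESPACE::SubpassDescription2 subpassDescription;
//   subpassDescription.setPNext( &resolveInfo );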
|
|
|
|
struct SubpassEndInfo
|
|
{
|
|
using NativeType = VkSubpassEndInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassEndInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SubpassEndInfo() VULKAN_HPP_NOEXCEPT
|
|
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SubpassEndInfo( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassEndInfo( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SubpassEndInfo( *reinterpret_cast<SubpassEndInfo const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SubpassEndInfo & operator=( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassEndInfo & operator=( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassEndInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassEndInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSubpassEndInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSubpassEndInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSubpassEndInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SubpassEndInfo const & ) const = default;
|
|
#else
|
|
bool operator==( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfo;
|
|
const void * pNext = {};
|
|
|
|
};
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassEndInfo ) == sizeof( VkSubpassEndInfo ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassEndInfo>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassEndInfo>::value, "SubpassEndInfo is not nothrow_move_constructible!" );

template <>
struct CppType<StructureType, StructureType::eSubpassEndInfo>
{
using Type = SubpassEndInfo;
};
using SubpassEndInfoKHR = SubpassEndInfo;
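
// Usage sketch (illustrative comment, not generated from the registry): SubpassEndInfo is
// consumed by the *2 subpass transition commands. The variables cmd and nextSubpassBegin
// are assumptions made for this example.
//
//   VULKAN_HPP_NAMESPACE::SubpassEndInfo subpassEnd;
//   cmd.nextSubpass2( nextSubpassBegin, subpassEnd );  // move to the next subpass
//   cmd.endRenderPass2( subpassEnd );                  // end the render pass instance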
|
|
|
|
struct SubpassFragmentDensityMapOffsetEndInfoQCOM
|
|
{
|
|
using NativeType = VkSubpassFragmentDensityMapOffsetEndInfoQCOM;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SubpassFragmentDensityMapOffsetEndInfoQCOM(uint32_t fragmentDensityOffsetCount_ = {}, const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: fragmentDensityOffsetCount( fragmentDensityOffsetCount_ ), pFragmentDensityOffsets( pFragmentDensityOffsets_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SubpassFragmentDensityMapOffsetEndInfoQCOM( SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassFragmentDensityMapOffsetEndInfoQCOM( VkSubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SubpassFragmentDensityMapOffsetEndInfoQCOM( *reinterpret_cast<SubpassFragmentDensityMapOffsetEndInfoQCOM const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubpassFragmentDensityMapOffsetEndInfoQCOM( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Offset2D> const & fragmentDensityOffsets_ )
|
|
: fragmentDensityOffsetCount( static_cast<uint32_t>( fragmentDensityOffsets_.size() ) ), pFragmentDensityOffsets( fragmentDensityOffsets_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SubpassFragmentDensityMapOffsetEndInfoQCOM & operator=( SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassFragmentDensityMapOffsetEndInfoQCOM & operator=( VkSubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassFragmentDensityMapOffsetEndInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassFragmentDensityMapOffsetEndInfoQCOM & setFragmentDensityOffsetCount( uint32_t fragmentDensityOffsetCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fragmentDensityOffsetCount = fragmentDensityOffsetCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SubpassFragmentDensityMapOffsetEndInfoQCOM & setPFragmentDensityOffsets( const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pFragmentDensityOffsets = pFragmentDensityOffsets_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SubpassFragmentDensityMapOffsetEndInfoQCOM & setFragmentDensityOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Offset2D> const & fragmentDensityOffsets_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fragmentDensityOffsetCount = static_cast<uint32_t>( fragmentDensityOffsets_.size() );
|
|
pFragmentDensityOffsets = fragmentDensityOffsets_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSubpassFragmentDensityMapOffsetEndInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSubpassFragmentDensityMapOffsetEndInfoQCOM*>( this );
|
|
}
|
|
|
|
explicit operator VkSubpassFragmentDensityMapOffsetEndInfoQCOM &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSubpassFragmentDensityMapOffsetEndInfoQCOM*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Offset2D * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, fragmentDensityOffsetCount, pFragmentDensityOffsets );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SubpassFragmentDensityMapOffsetEndInfoQCOM const & ) const = default;
|
|
#else
|
|
bool operator==( SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( fragmentDensityOffsetCount == rhs.fragmentDensityOffsetCount )
|
|
&& ( pFragmentDensityOffsets == rhs.pFragmentDensityOffsets );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM;
|
|
const void * pNext = {};
|
|
uint32_t fragmentDensityOffsetCount = {};
|
|
const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets = {};
|
|
|
|
};
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM ) == sizeof( VkSubpassFragmentDensityMapOffsetEndInfoQCOM ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM>::value, "SubpassFragmentDensityMapOffsetEndInfoQCOM is not nothrow_move_constructible!" );

template <>
struct CppType<StructureType, StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM>
{
using Type = SubpassFragmentDensityMapOffsetEndInfoQCOM;
};
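
// Usage sketch (illustrative comment, not generated from the registry): with
// VK_QCOM_fragment_density_map_offset this struct is chained into a SubpassEndInfo to
// supply per-subpass fragment density map offsets. The variable cmd and the offset
// values are assumptions made for this example.
//
//   std::array<VULKAN_HPP_NAMESPACE::Offset2D, 1> offsets = { VULKAN_HPP_NAMESPACE::Offset2D( 32, 32 ) };
//   VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM offsetInfo( offsets );
//   VULKAN_HPP_NAMESPACE::SubpassEndInfo subpassEnd;
//   subpassEnd.setPNext( &offsetInfo );
//   cmd.endRenderPass2( subpassEnd );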
|
|
|
|
struct SubpassShadingPipelineCreateInfoHUAWEI
|
|
{
|
|
using NativeType = VkSubpassShadingPipelineCreateInfoHUAWEI;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassShadingPipelineCreateInfoHUAWEI;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SubpassShadingPipelineCreateInfoHUAWEI(VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: renderPass( renderPass_ ), subpass( subpass_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SubpassShadingPipelineCreateInfoHUAWEI( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassShadingPipelineCreateInfoHUAWEI( VkSubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SubpassShadingPipelineCreateInfoHUAWEI( *reinterpret_cast<SubpassShadingPipelineCreateInfoHUAWEI const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SubpassShadingPipelineCreateInfoHUAWEI & operator=( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SubpassShadingPipelineCreateInfoHUAWEI & operator=( VkSubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkSubpassShadingPipelineCreateInfoHUAWEI const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSubpassShadingPipelineCreateInfoHUAWEI*>( this );
|
|
}
|
|
|
|
explicit operator VkSubpassShadingPipelineCreateInfoHUAWEI &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSubpassShadingPipelineCreateInfoHUAWEI*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::RenderPass const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, renderPass, subpass );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SubpassShadingPipelineCreateInfoHUAWEI const & ) const = default;
|
|
#else
|
|
bool operator==( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( renderPass == rhs.renderPass )
|
|
&& ( subpass == rhs.subpass );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassShadingPipelineCreateInfoHUAWEI;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
|
|
uint32_t subpass = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI ) == sizeof( VkSubpassShadingPipelineCreateInfoHUAWEI ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI>::value, "SubpassShadingPipelineCreateInfoHUAWEI is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSubpassShadingPipelineCreateInfoHUAWEI>
|
|
{
|
|
using Type = SubpassShadingPipelineCreateInfoHUAWEI;
|
|
};
|
|
|
|
struct SurfaceCapabilities2EXT
|
|
{
|
|
using NativeType = VkSurfaceCapabilities2EXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2EXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT(uint32_t minImageCount_ = {}, uint32_t maxImageCount_ = {}, VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, uint32_t maxImageArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {}, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: minImageCount( minImageCount_ ), maxImageCount( maxImageCount_ ), currentExtent( currentExtent_ ), minImageExtent( minImageExtent_ ), maxImageExtent( maxImageExtent_ ), maxImageArrayLayers( maxImageArrayLayers_ ), supportedTransforms( supportedTransforms_ ), currentTransform( currentTransform_ ), supportedCompositeAlpha( supportedCompositeAlpha_ ), supportedUsageFlags( supportedUsageFlags_ ), supportedSurfaceCounters( supportedSurfaceCounters_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SurfaceCapabilities2EXT( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SurfaceCapabilities2EXT( *reinterpret_cast<SurfaceCapabilities2EXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SurfaceCapabilities2EXT & operator=( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SurfaceCapabilities2EXT & operator=( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkSurfaceCapabilities2EXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSurfaceCapabilities2EXT*>( this );
|
|
}
|
|
|
|
explicit operator VkSurfaceCapabilities2EXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSurfaceCapabilities2EXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, minImageCount, maxImageCount, currentExtent, minImageExtent, maxImageExtent, maxImageArrayLayers, supportedTransforms, currentTransform, supportedCompositeAlpha, supportedUsageFlags, supportedSurfaceCounters );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SurfaceCapabilities2EXT const & ) const = default;
|
|
#else
|
|
bool operator==( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( minImageCount == rhs.minImageCount )
|
|
&& ( maxImageCount == rhs.maxImageCount )
|
|
&& ( currentExtent == rhs.currentExtent )
|
|
&& ( minImageExtent == rhs.minImageExtent )
|
|
&& ( maxImageExtent == rhs.maxImageExtent )
|
|
&& ( maxImageArrayLayers == rhs.maxImageArrayLayers )
|
|
&& ( supportedTransforms == rhs.supportedTransforms )
|
|
&& ( currentTransform == rhs.currentTransform )
|
|
&& ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
|
|
&& ( supportedUsageFlags == rhs.supportedUsageFlags )
|
|
&& ( supportedSurfaceCounters == rhs.supportedSurfaceCounters );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2EXT;
|
|
void * pNext = {};
|
|
uint32_t minImageCount = {};
|
|
uint32_t maxImageCount = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {};
|
|
uint32_t maxImageArrayLayers = {};
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
|
|
VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {};
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {};
|
|
VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters = {};
|
|
|
|
};
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::value, "SurfaceCapabilities2EXT is not nothrow_move_constructible!" );

template <>
struct CppType<StructureType, StructureType::eSurfaceCapabilities2EXT>
{
using Type = SurfaceCapabilities2EXT;
};
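
// Usage sketch (illustrative comment, not generated from the registry): SurfaceCapabilities2EXT
// is the output structure of the VK_EXT_display_surface_counter query. The variables
// physicalDevice and surface are assumptions made for this example.
//
//   VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT caps = physicalDevice.getSurfaceCapabilities2EXT( surface );
//   bool hasVBlankCounter = static_cast<bool>( caps.supportedSurfaceCounters & VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT::eVblank );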
|
|
|
|
struct SurfaceCapabilitiesKHR
|
|
{
|
|
using NativeType = VkSurfaceCapabilitiesKHR;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR(uint32_t minImageCount_ = {}, uint32_t maxImageCount_ = {}, VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, uint32_t maxImageArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: minImageCount( minImageCount_ ), maxImageCount( maxImageCount_ ), currentExtent( currentExtent_ ), minImageExtent( minImageExtent_ ), maxImageExtent( maxImageExtent_ ), maxImageArrayLayers( maxImageArrayLayers_ ), supportedTransforms( supportedTransforms_ ), currentTransform( currentTransform_ ), supportedCompositeAlpha( supportedCompositeAlpha_ ), supportedUsageFlags( supportedUsageFlags_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SurfaceCapabilitiesKHR( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SurfaceCapabilitiesKHR( *reinterpret_cast<SurfaceCapabilitiesKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SurfaceCapabilitiesKHR & operator=( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SurfaceCapabilitiesKHR & operator=( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSurfaceCapabilitiesKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSurfaceCapabilitiesKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( minImageCount, maxImageCount, currentExtent, minImageExtent, maxImageExtent, maxImageArrayLayers, supportedTransforms, currentTransform, supportedCompositeAlpha, supportedUsageFlags );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SurfaceCapabilitiesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( minImageCount == rhs.minImageCount )
|
|
&& ( maxImageCount == rhs.maxImageCount )
|
|
&& ( currentExtent == rhs.currentExtent )
|
|
&& ( minImageExtent == rhs.minImageExtent )
|
|
&& ( maxImageExtent == rhs.maxImageExtent )
|
|
&& ( maxImageArrayLayers == rhs.maxImageArrayLayers )
|
|
&& ( supportedTransforms == rhs.supportedTransforms )
|
|
&& ( currentTransform == rhs.currentTransform )
|
|
&& ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
|
|
&& ( supportedUsageFlags == rhs.supportedUsageFlags );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t minImageCount = {};
|
|
uint32_t maxImageCount = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {};
|
|
uint32_t maxImageArrayLayers = {};
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
|
|
VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {};
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {};
|
|
|
|
};
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::value, "SurfaceCapabilitiesKHR is not nothrow_move_constructible!" );
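
// Usage sketch (illustrative comment, not generated from the registry): typical swapchain setup
// reads the surface capabilities and clamps the requested image count. The variables
// physicalDevice and surface are assumptions made for this example.
//
//   VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR caps = physicalDevice.getSurfaceCapabilitiesKHR( surface );
//   uint32_t imageCount = caps.minImageCount + 1;
//   if ( ( caps.maxImageCount > 0 ) && ( imageCount > caps.maxImageCount ) )
//   {
//     imageCount = caps.maxImageCount;  // maxImageCount == 0 means "no upper limit"
//   }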
|
|
|
|
struct SurfaceCapabilities2KHR
|
|
{
|
|
using NativeType = VkSurfaceCapabilities2KHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2KHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR(VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: surfaceCapabilities( surfaceCapabilities_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SurfaceCapabilities2KHR( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SurfaceCapabilities2KHR( *reinterpret_cast<SurfaceCapabilities2KHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SurfaceCapabilities2KHR & operator=( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SurfaceCapabilities2KHR & operator=( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkSurfaceCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSurfaceCapabilities2KHR*>( this );
|
|
}
|
|
|
|
explicit operator VkSurfaceCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSurfaceCapabilities2KHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, surfaceCapabilities );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SurfaceCapabilities2KHR const & ) const = default;
|
|
#else
|
|
bool operator==( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( surfaceCapabilities == rhs.surfaceCapabilities );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2KHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities = {};
|
|
|
|
};
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::value, "SurfaceCapabilities2KHR is not nothrow_move_constructible!" );

template <>
struct CppType<StructureType, StructureType::eSurfaceCapabilities2KHR>
{
using Type = SurfaceCapabilities2KHR;
};
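
// Usage sketch (illustrative comment, not generated from the registry): the *2KHR query takes a
// PhysicalDeviceSurfaceInfo2KHR and wraps the plain capabilities; extension structs can be added
// to its pNext chain. The variables physicalDevice and surface are assumptions made for this example.
//
//   VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR surfaceInfo( surface );
//   VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR caps2 = physicalDevice.getSurfaceCapabilities2KHR( surfaceInfo );
//   VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR const & caps = caps2.surfaceCapabilities;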
|
|
|
|
#if defined( VK_USE_PLATFORM_WIN32_KHR )
|
|
struct SurfaceCapabilitiesFullScreenExclusiveEXT
|
|
{
|
|
using NativeType = VkSurfaceCapabilitiesFullScreenExclusiveEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT(VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: fullScreenExclusiveSupported( fullScreenExclusiveSupported_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SurfaceCapabilitiesFullScreenExclusiveEXT( *reinterpret_cast<SurfaceCapabilitiesFullScreenExclusiveEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesFullScreenExclusiveEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesFullScreenExclusiveEXT & setFullScreenExclusiveSupported( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fullScreenExclusiveSupported = fullScreenExclusiveSupported_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSurfaceCapabilitiesFullScreenExclusiveEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkSurfaceCapabilitiesFullScreenExclusiveEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSurfaceCapabilitiesFullScreenExclusiveEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, fullScreenExclusiveSupported );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SurfaceCapabilitiesFullScreenExclusiveEXT const & ) const = default;
|
|
#else
|
|
bool operator==( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( fullScreenExclusiveSupported == rhs.fullScreenExclusiveSupported );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT ) == sizeof( VkSurfaceCapabilitiesFullScreenExclusiveEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT>::value, "SurfaceCapabilitiesFullScreenExclusiveEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT>
|
|
{
|
|
using Type = SurfaceCapabilitiesFullScreenExclusiveEXT;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
|
|
|
|
struct SurfaceFormatKHR
|
|
{
|
|
using NativeType = VkSurfaceFormatKHR;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SurfaceFormatKHR(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear) VULKAN_HPP_NOEXCEPT
|
|
: format( format_ ), colorSpace( colorSpace_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SurfaceFormatKHR( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SurfaceFormatKHR( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SurfaceFormatKHR( *reinterpret_cast<SurfaceFormatKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SurfaceFormatKHR & operator=( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SurfaceFormatKHR & operator=( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkSurfaceFormatKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSurfaceFormatKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkSurfaceFormatKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSurfaceFormatKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::ColorSpaceKHR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( format, colorSpace );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SurfaceFormatKHR const & ) const = default;
|
|
#else
|
|
bool operator==( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( format == rhs.format )
|
|
&& ( colorSpace == rhs.colorSpace );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
|
|
|
|
};
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>::value, "SurfaceFormatKHR is not nothrow_move_constructible!" );
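
// Usage sketch (illustrative comment, not generated from the registry): picking a swapchain format
// from the supported list. The variables physicalDevice and surface are assumptions made for this
// example, and the preferred format/color space pair is an arbitrary choice.
//
//   std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR> formats = physicalDevice.getSurfaceFormatsKHR( surface );
//   VULKAN_HPP_NAMESPACE::SurfaceFormatKHR chosen = formats.front();
//   for ( auto const & f : formats )
//   {
//     if ( ( f.format == VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb ) && ( f.colorSpace == VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear ) )
//     {
//       chosen = f;
//       break;
//     }
//   }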
|
|
|
|
struct SurfaceFormat2KHR
|
|
{
|
|
using NativeType = VkSurfaceFormat2KHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFormat2KHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR(VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: surfaceFormat( surfaceFormat_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SurfaceFormat2KHR( *reinterpret_cast<SurfaceFormat2KHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SurfaceFormat2KHR & operator=( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SurfaceFormat2KHR & operator=( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkSurfaceFormat2KHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSurfaceFormat2KHR*>( this );
|
|
}
|
|
|
|
explicit operator VkSurfaceFormat2KHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSurfaceFormat2KHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, surfaceFormat );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SurfaceFormat2KHR const & ) const = default;
|
|
#else
|
|
bool operator==( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( surfaceFormat == rhs.surfaceFormat );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFormat2KHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>::value, "SurfaceFormat2KHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSurfaceFormat2KHR>
|
|
{
|
|
using Type = SurfaceFormat2KHR;
|
|
};
|
|
|
|
#if defined( VK_USE_PLATFORM_WIN32_KHR )
|
|
struct SurfaceFullScreenExclusiveInfoEXT
|
|
{
|
|
using NativeType = VkSurfaceFullScreenExclusiveInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT(VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault) VULKAN_HPP_NOEXCEPT
|
|
: fullScreenExclusive( fullScreenExclusive_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SurfaceFullScreenExclusiveInfoEXT( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SurfaceFullScreenExclusiveInfoEXT( *reinterpret_cast<SurfaceFullScreenExclusiveInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SurfaceFullScreenExclusiveInfoEXT & operator=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SurfaceFullScreenExclusiveInfoEXT & operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveInfoEXT & setFullScreenExclusive( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
fullScreenExclusive = fullScreenExclusive_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSurfaceFullScreenExclusiveInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSurfaceFullScreenExclusiveInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkSurfaceFullScreenExclusiveInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSurfaceFullScreenExclusiveInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, fullScreenExclusive );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SurfaceFullScreenExclusiveInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( SurfaceFullScreenExclusiveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( fullScreenExclusive == rhs.fullScreenExclusive );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveInfoEXT;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault;
|
|
|
|
};
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveInfoEXT ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT>::value, "SurfaceFullScreenExclusiveInfoEXT is not nothrow_move_constructible!" );

template <>
struct CppType<StructureType, StructureType::eSurfaceFullScreenExclusiveInfoEXT>
{
using Type = SurfaceFullScreenExclusiveInfoEXT;
};
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
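  //
  // Usage sketch (illustrative comment, not part of the generated registry output): the chosen
  // full-screen exclusive policy can also be chained behind vk::PhysicalDeviceSurfaceInfo2KHR when
  // querying which present modes a surface supports under that policy. 'physicalDevice' and
  // 'surface' are assumed application-side handles; 'vk' is the default VULKAN_HPP_NAMESPACE alias.
  //
  //   vk::SurfaceFullScreenExclusiveInfoEXT fullScreenExclusiveInfo{ vk::FullScreenExclusiveEXT::eApplicationControlled };
  //   vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo = vk::PhysicalDeviceSurfaceInfo2KHR{}
  //                                                     .setSurface( surface )
  //                                                     .setPNext( &fullScreenExclusiveInfo );
  //   std::vector<vk::PresentModeKHR> presentModes = physicalDevice.getSurfacePresentModes2EXT( surfaceInfo );
  //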
|
|
|
|
#if defined( VK_USE_PLATFORM_WIN32_KHR )
|
|
struct SurfaceFullScreenExclusiveWin32InfoEXT
|
|
{
|
|
using NativeType = VkSurfaceFullScreenExclusiveWin32InfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT(HMONITOR hmonitor_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: hmonitor( hmonitor_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SurfaceFullScreenExclusiveWin32InfoEXT( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SurfaceFullScreenExclusiveWin32InfoEXT( *reinterpret_cast<SurfaceFullScreenExclusiveWin32InfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SurfaceFullScreenExclusiveWin32InfoEXT & operator=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SurfaceFullScreenExclusiveWin32InfoEXT & operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveWin32InfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveWin32InfoEXT & setHmonitor( HMONITOR hmonitor_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
hmonitor = hmonitor_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSurfaceFullScreenExclusiveWin32InfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSurfaceFullScreenExclusiveWin32InfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkSurfaceFullScreenExclusiveWin32InfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSurfaceFullScreenExclusiveWin32InfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, HMONITOR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, hmonitor );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SurfaceFullScreenExclusiveWin32InfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( hmonitor == rhs.hmonitor );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT;
|
|
const void * pNext = {};
|
|
HMONITOR hmonitor = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveWin32InfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT>::value, "SurfaceFullScreenExclusiveWin32InfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT>
|
|
{
|
|
using Type = SurfaceFullScreenExclusiveWin32InfoEXT;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
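  //
  // Usage sketch (illustrative comment, not generated): on Win32 the two full-screen exclusive
  // structures are typically chained behind the swapchain create info, and exclusive mode is
  // acquired once the swapchain exists. 'swapchainCreateInfo' is an already filled
  // vk::SwapchainCreateInfoKHR, 'hwnd' an application window handle, and
  // VK_EXT_full_screen_exclusive is assumed to be enabled on the device.
  //
  //   vk::StructureChain<vk::SwapchainCreateInfoKHR,
  //                      vk::SurfaceFullScreenExclusiveInfoEXT,
  //                      vk::SurfaceFullScreenExclusiveWin32InfoEXT>
  //     chain( swapchainCreateInfo,
  //            vk::SurfaceFullScreenExclusiveInfoEXT{ vk::FullScreenExclusiveEXT::eApplicationControlled },
  //            vk::SurfaceFullScreenExclusiveWin32InfoEXT{ MonitorFromWindow( hwnd, MONITOR_DEFAULTTONEAREST ) } );
  //   vk::SwapchainKHR swapchain = device.createSwapchainKHR( chain.get<vk::SwapchainCreateInfoKHR>() );
  //   device.acquireFullScreenExclusiveModeEXT( swapchain );
  //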
|
|
|
|
struct SurfaceProtectedCapabilitiesKHR
|
|
{
|
|
using NativeType = VkSurfaceProtectedCapabilitiesKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceProtectedCapabilitiesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR(VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: supportsProtected( supportsProtected_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SurfaceProtectedCapabilitiesKHR( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SurfaceProtectedCapabilitiesKHR( *reinterpret_cast<SurfaceProtectedCapabilitiesKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SurfaceProtectedCapabilitiesKHR & operator=( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SurfaceProtectedCapabilitiesKHR & operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SurfaceProtectedCapabilitiesKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SurfaceProtectedCapabilitiesKHR & setSupportsProtected( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
supportsProtected = supportsProtected_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSurfaceProtectedCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSurfaceProtectedCapabilitiesKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkSurfaceProtectedCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSurfaceProtectedCapabilitiesKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, supportsProtected );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SurfaceProtectedCapabilitiesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( SurfaceProtectedCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( supportsProtected == rhs.supportsProtected );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SurfaceProtectedCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceProtectedCapabilitiesKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 supportsProtected = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR ) == sizeof( VkSurfaceProtectedCapabilitiesKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR>::value, "SurfaceProtectedCapabilitiesKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSurfaceProtectedCapabilitiesKHR>
|
|
{
|
|
using Type = SurfaceProtectedCapabilitiesKHR;
|
|
};
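  //
  // Usage sketch (illustrative comment, not generated): SurfaceProtectedCapabilitiesKHR is an
  // output structure; it is read by chaining it behind vk::SurfaceCapabilities2KHR when querying a
  // surface. 'physicalDevice' and 'surface' are assumed application-side handles.
  //
  //   auto capabilitiesChain =
  //     physicalDevice.getSurfaceCapabilities2KHR<vk::SurfaceCapabilities2KHR, vk::SurfaceProtectedCapabilitiesKHR>(
  //       vk::PhysicalDeviceSurfaceInfo2KHR{}.setSurface( surface ) );
  //   bool protectedSwapchainSupported =
  //     capabilitiesChain.get<vk::SurfaceProtectedCapabilitiesKHR>().supportsProtected == VK_TRUE;
  //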
|
|
|
|
struct SwapchainCounterCreateInfoEXT
|
|
{
|
|
using NativeType = VkSwapchainCounterCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCounterCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT(VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: surfaceCounters( surfaceCounters_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SwapchainCounterCreateInfoEXT( *reinterpret_cast<SwapchainCounterCreateInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SwapchainCounterCreateInfoEXT & operator=( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SwapchainCounterCreateInfoEXT & operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCounterCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCounterCreateInfoEXT & setSurfaceCounters( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
surfaceCounters = surfaceCounters_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSwapchainCounterCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSwapchainCounterCreateInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkSwapchainCounterCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSwapchainCounterCreateInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, surfaceCounters );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SwapchainCounterCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( surfaceCounters == rhs.surfaceCounters );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT>::value, "SwapchainCounterCreateInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSwapchainCounterCreateInfoEXT>
|
|
{
|
|
using Type = SwapchainCounterCreateInfoEXT;
|
|
};
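  //
  // Usage sketch (illustrative comment, not generated): requesting a vblank counter on a display
  // swapchain (VK_EXT_display_control) and reading it back after creation. 'swapchainCreateInfo'
  // is the application's vk::SwapchainCreateInfoKHR.
  //
  //   vk::SwapchainCounterCreateInfoEXT counterInfo{ vk::SurfaceCounterFlagBitsEXT::eVblank };
  //   swapchainCreateInfo.setPNext( &counterInfo );
  //   vk::SwapchainKHR swapchain = device.createSwapchainKHR( swapchainCreateInfo );
  //   uint64_t vblankCount = device.getSwapchainCounterEXT( swapchain, vk::SurfaceCounterFlagBitsEXT::eVblank );
  //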
|
|
|
|
struct SwapchainCreateInfoKHR
|
|
{
|
|
using NativeType = VkSwapchainCreateInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCreateInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}, uint32_t minImageCount_ = {}, VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear, VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}, uint32_t imageArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t * pQueueFamilyIndices_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), surface( surface_ ), minImageCount( minImageCount_ ), imageFormat( imageFormat_ ), imageColorSpace( imageColorSpace_ ), imageExtent( imageExtent_ ), imageArrayLayers( imageArrayLayers_ ), imageUsage( imageUsage_ ), imageSharingMode( imageSharingMode_ ), queueFamilyIndexCount( queueFamilyIndexCount_ ), pQueueFamilyIndices( pQueueFamilyIndices_ ), preTransform( preTransform_ ), compositeAlpha( compositeAlpha_ ), presentMode( presentMode_ ), clipped( clipped_ ), oldSwapchain( oldSwapchain_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SwapchainCreateInfoKHR( *reinterpret_cast<SwapchainCreateInfoKHR const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_, VULKAN_HPP_NAMESPACE::SurfaceKHR surface_, uint32_t minImageCount_, VULKAN_HPP_NAMESPACE::Format imageFormat_, VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_, VULKAN_HPP_NAMESPACE::Extent2D imageExtent_, uint32_t imageArrayLayers_, VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_, VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {} )
|
|
: flags( flags_ ), surface( surface_ ), minImageCount( minImageCount_ ), imageFormat( imageFormat_ ), imageColorSpace( imageColorSpace_ ), imageExtent( imageExtent_ ), imageArrayLayers( imageArrayLayers_ ), imageUsage( imageUsage_ ), imageSharingMode( imageSharingMode_ ), queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() ), preTransform( preTransform_ ), compositeAlpha( compositeAlpha_ ), presentMode( presentMode_ ), clipped( clipped_ ), oldSwapchain( oldSwapchain_ )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SwapchainCreateInfoKHR & operator=( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SwapchainCreateInfoKHR & operator=( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
surface = surface_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setMinImageCount( uint32_t minImageCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
minImageCount = minImageCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageFormat = imageFormat_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageColorSpace( VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageColorSpace = imageColorSpace_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageExtent = imageExtent_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageArrayLayers( uint32_t imageArrayLayers_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageArrayLayers = imageArrayLayers_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageUsage = imageUsage_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageSharingMode( VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageSharingMode = imageSharingMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueFamilyIndexCount = queueFamilyIndexCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pQueueFamilyIndices = pQueueFamilyIndices_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
SwapchainCreateInfoKHR & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
|
|
pQueueFamilyIndices = queueFamilyIndices_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPreTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
preTransform = preTransform_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setCompositeAlpha( VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
compositeAlpha = compositeAlpha_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
presentMode = presentMode_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setClipped( VULKAN_HPP_NAMESPACE::Bool32 clipped_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
clipped = clipped_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setOldSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
oldSwapchain = oldSwapchain_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSwapchainCreateInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSwapchainCreateInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR const &, VULKAN_HPP_NAMESPACE::SurfaceKHR const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::ColorSpaceKHR const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &, VULKAN_HPP_NAMESPACE::SharingMode const &, uint32_t const &, const uint32_t * const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR const &, VULKAN_HPP_NAMESPACE::PresentModeKHR const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::SwapchainKHR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, surface, minImageCount, imageFormat, imageColorSpace, imageExtent, imageArrayLayers, imageUsage, imageSharingMode, queueFamilyIndexCount, pQueueFamilyIndices, preTransform, compositeAlpha, presentMode, clipped, oldSwapchain );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SwapchainCreateInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( surface == rhs.surface )
|
|
&& ( minImageCount == rhs.minImageCount )
|
|
&& ( imageFormat == rhs.imageFormat )
|
|
&& ( imageColorSpace == rhs.imageColorSpace )
|
|
&& ( imageExtent == rhs.imageExtent )
|
|
&& ( imageArrayLayers == rhs.imageArrayLayers )
|
|
&& ( imageUsage == rhs.imageUsage )
|
|
&& ( imageSharingMode == rhs.imageSharingMode )
|
|
&& ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
|
|
&& ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
|
|
&& ( preTransform == rhs.preTransform )
|
|
&& ( compositeAlpha == rhs.compositeAlpha )
|
|
&& ( presentMode == rhs.presentMode )
|
|
&& ( clipped == rhs.clipped )
|
|
&& ( oldSwapchain == rhs.oldSwapchain );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCreateInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags = {};
|
|
VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {};
|
|
uint32_t minImageCount = {};
|
|
VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
|
|
VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {};
|
|
uint32_t imageArrayLayers = {};
|
|
VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {};
|
|
VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
|
|
uint32_t queueFamilyIndexCount = {};
|
|
const uint32_t * pQueueFamilyIndices = {};
|
|
VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
|
|
VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque;
|
|
VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate;
|
|
VULKAN_HPP_NAMESPACE::Bool32 clipped = {};
|
|
VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR>::value, "SwapchainCreateInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSwapchainCreateInfoKHR>
|
|
{
|
|
using Type = SwapchainCreateInfoKHR;
|
|
};
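  //
  // Usage sketch (illustrative comment, not generated): filling a SwapchainCreateInfoKHR with the
  // fluent setters and creating the swapchain. 'surface', 'capabilities', 'surfaceFormat', 'extent',
  // 'presentMode' and 'oldSwapchain' are assumed to come from the application's earlier surface
  // queries; clamp the image count against capabilities.maxImageCount when that value is non-zero.
  //
  //   vk::SwapchainCreateInfoKHR swapchainCreateInfo = vk::SwapchainCreateInfoKHR{}
  //                                                      .setSurface( surface )
  //                                                      .setMinImageCount( capabilities.minImageCount + 1 )
  //                                                      .setImageFormat( surfaceFormat.format )
  //                                                      .setImageColorSpace( surfaceFormat.colorSpace )
  //                                                      .setImageExtent( extent )
  //                                                      .setImageArrayLayers( 1 )
  //                                                      .setImageUsage( vk::ImageUsageFlagBits::eColorAttachment )
  //                                                      .setImageSharingMode( vk::SharingMode::eExclusive )
  //                                                      .setPreTransform( capabilities.currentTransform )
  //                                                      .setCompositeAlpha( vk::CompositeAlphaFlagBitsKHR::eOpaque )
  //                                                      .setPresentMode( presentMode )
  //                                                      .setClipped( VK_TRUE )
  //                                                      .setOldSwapchain( oldSwapchain );   // {} on first creation
  //   vk::UniqueSwapchainKHR swapchain = device.createSwapchainKHRUnique( swapchainCreateInfo );
  //
  // When the graphics and present queue families differ, use vk::SharingMode::eConcurrent together
  // with setQueueFamilyIndices( queueFamilyIndices ), which fills queueFamilyIndexCount and
  // pQueueFamilyIndices in one call.
  //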
|
|
|
|
struct SwapchainDisplayNativeHdrCreateInfoAMD
|
|
{
|
|
using NativeType = VkSwapchainDisplayNativeHdrCreateInfoAMD;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD(VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: localDimmingEnable( localDimmingEnable_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SwapchainDisplayNativeHdrCreateInfoAMD( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: SwapchainDisplayNativeHdrCreateInfoAMD( *reinterpret_cast<SwapchainDisplayNativeHdrCreateInfoAMD const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
SwapchainDisplayNativeHdrCreateInfoAMD & operator=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
SwapchainDisplayNativeHdrCreateInfoAMD & operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainDisplayNativeHdrCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 SwapchainDisplayNativeHdrCreateInfoAMD & setLocalDimmingEnable( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
localDimmingEnable = localDimmingEnable_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkSwapchainDisplayNativeHdrCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkSwapchainDisplayNativeHdrCreateInfoAMD*>( this );
|
|
}
|
|
|
|
explicit operator VkSwapchainDisplayNativeHdrCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkSwapchainDisplayNativeHdrCreateInfoAMD*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, localDimmingEnable );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( SwapchainDisplayNativeHdrCreateInfoAMD const & ) const = default;
|
|
#else
|
|
bool operator==( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( localDimmingEnable == rhs.localDimmingEnable );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD ) == sizeof( VkSwapchainDisplayNativeHdrCreateInfoAMD ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD>::value, "SwapchainDisplayNativeHdrCreateInfoAMD is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD>
|
|
{
|
|
using Type = SwapchainDisplayNativeHdrCreateInfoAMD;
|
|
};
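  //
  // Usage sketch (illustrative comment, not generated): opting into AMD display-native HDR local
  // dimming at swapchain creation and toggling it afterwards (VK_AMD_display_native_hdr), provided
  // the surface reports localDimmingSupport. 'swapchainCreateInfo' is the application's
  // vk::SwapchainCreateInfoKHR.
  //
  //   vk::SwapchainDisplayNativeHdrCreateInfoAMD nativeHdrInfo{ VK_TRUE };   // localDimmingEnable
  //   swapchainCreateInfo.setPNext( &nativeHdrInfo );
  //   vk::SwapchainKHR swapchain = device.createSwapchainKHR( swapchainCreateInfo );
  //   device.setLocalDimmingAMD( swapchain, VK_FALSE );
  //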
|
|
|
|
struct TextureLODGatherFormatPropertiesAMD
|
|
{
|
|
using NativeType = VkTextureLODGatherFormatPropertiesAMD;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTextureLodGatherFormatPropertiesAMD;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD(VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: supportsTextureGatherLODBiasAMD( supportsTextureGatherLODBiasAMD_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
TextureLODGatherFormatPropertiesAMD( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: TextureLODGatherFormatPropertiesAMD( *reinterpret_cast<TextureLODGatherFormatPropertiesAMD const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
TextureLODGatherFormatPropertiesAMD & operator=( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
TextureLODGatherFormatPropertiesAMD & operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkTextureLODGatherFormatPropertiesAMD const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkTextureLODGatherFormatPropertiesAMD*>( this );
|
|
}
|
|
|
|
explicit operator VkTextureLODGatherFormatPropertiesAMD &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkTextureLODGatherFormatPropertiesAMD*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, supportsTextureGatherLODBiasAMD );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( TextureLODGatherFormatPropertiesAMD const & ) const = default;
|
|
#else
|
|
bool operator==( TextureLODGatherFormatPropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( TextureLODGatherFormatPropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD ) == sizeof( VkTextureLODGatherFormatPropertiesAMD ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD>::value, "TextureLODGatherFormatPropertiesAMD is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eTextureLodGatherFormatPropertiesAMD>
|
|
{
|
|
using Type = TextureLODGatherFormatPropertiesAMD;
|
|
};
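  //
  // Usage sketch (illustrative comment, not generated): TextureLODGatherFormatPropertiesAMD is an
  // output structure, chained behind vk::ImageFormatProperties2 when querying a format
  // (VK_AMD_texture_gather_bias_lod). 'physicalDevice' is an assumed application-side handle.
  //
  //   auto formatChain =
  //     physicalDevice.getImageFormatProperties2<vk::ImageFormatProperties2, vk::TextureLODGatherFormatPropertiesAMD>(
  //       vk::PhysicalDeviceImageFormatInfo2{}
  //         .setFormat( vk::Format::eR8G8B8A8Unorm )
  //         .setType( vk::ImageType::e2D )
  //         .setTiling( vk::ImageTiling::eOptimal )
  //         .setUsage( vk::ImageUsageFlagBits::eSampled ) );
  //   vk::Bool32 gatherBiasLodSupported =
  //     formatChain.get<vk::TextureLODGatherFormatPropertiesAMD>().supportsTextureGatherLODBiasAMD;
  //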
|
|
|
|
struct TimelineSemaphoreSubmitInfo
|
|
{
|
|
using NativeType = VkTimelineSemaphoreSubmitInfo;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTimelineSemaphoreSubmitInfo;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo(uint32_t waitSemaphoreValueCount_ = {}, const uint64_t * pWaitSemaphoreValues_ = {}, uint32_t signalSemaphoreValueCount_ = {}, const uint64_t * pSignalSemaphoreValues_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: waitSemaphoreValueCount( waitSemaphoreValueCount_ ), pWaitSemaphoreValues( pWaitSemaphoreValues_ ), signalSemaphoreValueCount( signalSemaphoreValueCount_ ), pSignalSemaphoreValues( pSignalSemaphoreValues_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
TimelineSemaphoreSubmitInfo( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: TimelineSemaphoreSubmitInfo( *reinterpret_cast<TimelineSemaphoreSubmitInfo const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
TimelineSemaphoreSubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {} )
|
|
: waitSemaphoreValueCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) ), pWaitSemaphoreValues( waitSemaphoreValues_.data() ), signalSemaphoreValueCount( static_cast<uint32_t>( signalSemaphoreValues_.size() ) ), pSignalSemaphoreValues( signalSemaphoreValues_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
TimelineSemaphoreSubmitInfo & operator=( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
TimelineSemaphoreSubmitInfo & operator=( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setWaitSemaphoreValueCount( uint32_t waitSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
waitSemaphoreValueCount = waitSemaphoreValueCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pWaitSemaphoreValues = pWaitSemaphoreValues_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
TimelineSemaphoreSubmitInfo & setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
waitSemaphoreValueCount = static_cast<uint32_t>( waitSemaphoreValues_.size() );
|
|
pWaitSemaphoreValues = waitSemaphoreValues_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setSignalSemaphoreValueCount( uint32_t signalSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
signalSemaphoreValueCount = signalSemaphoreValueCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSignalSemaphoreValues = pSignalSemaphoreValues_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
TimelineSemaphoreSubmitInfo & setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
signalSemaphoreValueCount = static_cast<uint32_t>( signalSemaphoreValues_.size() );
|
|
pSignalSemaphoreValues = signalSemaphoreValues_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkTimelineSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkTimelineSemaphoreSubmitInfo*>( this );
|
|
}
|
|
|
|
explicit operator VkTimelineSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkTimelineSemaphoreSubmitInfo*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint64_t * const &, uint32_t const &, const uint64_t * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, waitSemaphoreValueCount, pWaitSemaphoreValues, signalSemaphoreValueCount, pSignalSemaphoreValues );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( TimelineSemaphoreSubmitInfo const & ) const = default;
|
|
#else
|
|
bool operator==( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( waitSemaphoreValueCount == rhs.waitSemaphoreValueCount )
|
|
&& ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues )
|
|
&& ( signalSemaphoreValueCount == rhs.signalSemaphoreValueCount )
|
|
&& ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTimelineSemaphoreSubmitInfo;
|
|
const void * pNext = {};
|
|
uint32_t waitSemaphoreValueCount = {};
|
|
const uint64_t * pWaitSemaphoreValues = {};
|
|
uint32_t signalSemaphoreValueCount = {};
|
|
const uint64_t * pSignalSemaphoreValues = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo ) == sizeof( VkTimelineSemaphoreSubmitInfo ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo>::value, "TimelineSemaphoreSubmitInfo is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eTimelineSemaphoreSubmitInfo>
|
|
{
|
|
using Type = TimelineSemaphoreSubmitInfo;
|
|
};
|
|
using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo;
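  //
  // Usage sketch (illustrative comment, not generated): submitting with a timeline semaphore
  // (Vulkan 1.2 / VK_KHR_timeline_semaphore). The value arrays are paired by index with the wait
  // and signal semaphores of the vk::SubmitInfo being extended; 'timelineSemaphore' and 'queue'
  // are assumed application-side handles.
  //
  //   uint64_t waitValue = 1, signalValue = 2;
  //   vk::TimelineSemaphoreSubmitInfo timelineInfo = vk::TimelineSemaphoreSubmitInfo{}
  //                                                    .setWaitSemaphoreValues( waitValue )
  //                                                    .setSignalSemaphoreValues( signalValue );
  //   vk::PipelineStageFlags waitStage = vk::PipelineStageFlagBits::eTopOfPipe;
  //   vk::SubmitInfo submitInfo = vk::SubmitInfo{}
  //                                 .setWaitSemaphores( timelineSemaphore )
  //                                 .setWaitDstStageMask( waitStage )
  //                                 .setSignalSemaphores( timelineSemaphore )
  //                                 .setPNext( &timelineInfo );
  //   queue.submit( submitInfo );
  //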
|
|
|
|
struct TraceRaysIndirectCommandKHR
|
|
{
|
|
using NativeType = VkTraceRaysIndirectCommandKHR;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR(uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: width( width_ ), height( height_ ), depth( depth_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
TraceRaysIndirectCommandKHR( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: TraceRaysIndirectCommandKHR( *reinterpret_cast<TraceRaysIndirectCommandKHR const *>( &rhs ) )
|
|
{}
|
|
|
|
explicit TraceRaysIndirectCommandKHR( Extent2D const & extent2D, uint32_t depth_ = {} )
|
|
: width( extent2D.width )
|
|
, height( extent2D.height )
|
|
, depth( depth_ )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
TraceRaysIndirectCommandKHR & operator=( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
TraceRaysIndirectCommandKHR & operator=( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
width = width_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
height = height_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
depth = depth_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkTraceRaysIndirectCommandKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkTraceRaysIndirectCommandKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkTraceRaysIndirectCommandKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkTraceRaysIndirectCommandKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( width, height, depth );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( TraceRaysIndirectCommandKHR const & ) const = default;
|
|
#else
|
|
bool operator==( TraceRaysIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( width == rhs.width )
|
|
&& ( height == rhs.height )
|
|
&& ( depth == rhs.depth );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( TraceRaysIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
uint32_t width = {};
|
|
uint32_t height = {};
|
|
uint32_t depth = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR ) == sizeof( VkTraceRaysIndirectCommandKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR>::value, "TraceRaysIndirectCommandKHR is not nothrow_move_constructible!" );
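  //
  // Usage sketch (illustrative comment, not generated): the three counters are written into a
  // host-visible, indirect-usage buffer and later consumed through its device address by
  // vk::CommandBuffer::traceRaysIndirectKHR. 'mapped' is assumed to point at memory the
  // application has already mapped for that buffer.
  //
  //   vk::TraceRaysIndirectCommandKHR traceRaysDimensions( vk::Extent2D{ 1920, 1080 }, 1 );
  //   std::memcpy( mapped, &traceRaysDimensions, sizeof( traceRaysDimensions ) );   // <cstring>
  //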
|
|
|
|
struct ValidationCacheCreateInfoEXT
|
|
{
|
|
using NativeType = VkValidationCacheCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationCacheCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT(VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ = {}, size_t initialDataSize_ = {}, const void * pInitialData_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), initialDataSize( initialDataSize_ ), pInitialData( pInitialData_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ValidationCacheCreateInfoEXT( *reinterpret_cast<ValidationCacheCreateInfoEXT const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
template <typename T>
|
|
ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ )
|
|
: flags( flags_ ), initialDataSize( initialData_.size() * sizeof(T) ), pInitialData( initialData_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ValidationCacheCreateInfoEXT & operator=( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ValidationCacheCreateInfoEXT & operator=( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
initialDataSize = initialDataSize_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pInitialData = pInitialData_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
template <typename T>
|
|
ValidationCacheCreateInfoEXT & setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
initialDataSize = initialData_.size() * sizeof(T);
|
|
pInitialData = initialData_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkValidationCacheCreateInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkValidationCacheCreateInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT const &, size_t const &, const void * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, initialDataSize, pInitialData );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ValidationCacheCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( ValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( initialDataSize == rhs.initialDataSize )
|
|
&& ( pInitialData == rhs.pInitialData );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationCacheCreateInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags = {};
|
|
size_t initialDataSize = {};
|
|
const void * pInitialData = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT>::value, "ValidationCacheCreateInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eValidationCacheCreateInfoEXT>
|
|
{
|
|
using Type = ValidationCacheCreateInfoEXT;
|
|
};
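  //
  // Usage sketch (illustrative comment, not generated): recreating a validation cache from a blob
  // saved by an earlier run via vk::Device::getValidationCacheDataEXT. 'cacheBlob' is an assumed
  // std::vector<uint8_t> loaded from disk by the application; it may be empty on a first run.
  //
  //   vk::ValidationCacheCreateInfoEXT cacheCreateInfo = vk::ValidationCacheCreateInfoEXT{}
  //                                                        .setInitialDataSize( cacheBlob.size() )
  //                                                        .setPInitialData( cacheBlob.data() );
  //   vk::UniqueValidationCacheEXT validationCache = device.createValidationCacheEXTUnique( cacheCreateInfo );
  //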
|
|
|
|
struct ValidationFeaturesEXT
|
|
{
|
|
using NativeType = VkValidationFeaturesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFeaturesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT(uint32_t enabledValidationFeatureCount_ = {}, const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ = {}, uint32_t disabledValidationFeatureCount_ = {}, const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: enabledValidationFeatureCount( enabledValidationFeatureCount_ ), pEnabledValidationFeatures( pEnabledValidationFeatures_ ), disabledValidationFeatureCount( disabledValidationFeatureCount_ ), pDisabledValidationFeatures( pDisabledValidationFeatures_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ValidationFeaturesEXT( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ValidationFeaturesEXT( *reinterpret_cast<ValidationFeaturesEXT const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ValidationFeaturesEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT> const & enabledValidationFeatures_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT> const & disabledValidationFeatures_ = {} )
|
|
: enabledValidationFeatureCount( static_cast<uint32_t>( enabledValidationFeatures_.size() ) ), pEnabledValidationFeatures( enabledValidationFeatures_.data() ), disabledValidationFeatureCount( static_cast<uint32_t>( disabledValidationFeatures_.size() ) ), pDisabledValidationFeatures( disabledValidationFeatures_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ValidationFeaturesEXT & operator=( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ValidationFeaturesEXT & operator=( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setEnabledValidationFeatureCount( uint32_t enabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
enabledValidationFeatureCount = enabledValidationFeatureCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setPEnabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pEnabledValidationFeatures = pEnabledValidationFeatures_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ValidationFeaturesEXT & setEnabledValidationFeatures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT> const & enabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
enabledValidationFeatureCount = static_cast<uint32_t>( enabledValidationFeatures_.size() );
|
|
pEnabledValidationFeatures = enabledValidationFeatures_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setDisabledValidationFeatureCount( uint32_t disabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT
{
disabledValidationFeatureCount = disabledValidationFeatureCount_;
return *this;
}

VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setPDisabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
{
pDisabledValidationFeatures = pDisabledValidationFeatures_;
return *this;
}

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
ValidationFeaturesEXT & setDisabledValidationFeatures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT> const & disabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
{
disabledValidationFeatureCount = static_cast<uint32_t>( disabledValidationFeatures_.size() );
pDisabledValidationFeatures = disabledValidationFeatures_.data();
return *this;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/


explicit operator VkValidationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkValidationFeaturesEXT*>( this );
}

explicit operator VkValidationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkValidationFeaturesEXT*>( this );
}

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
auto
#else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * const &>
#endif
reflect() const VULKAN_HPP_NOEXCEPT
{
return std::tie( sType, pNext, enabledValidationFeatureCount, pEnabledValidationFeatures, disabledValidationFeatureCount, pDisabledValidationFeatures );
}
#endif


#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ValidationFeaturesEXT const & ) const = default;
#else
bool operator==( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#if defined( VULKAN_HPP_USE_REFLECT )
return this->reflect() == rhs.reflect();
#else
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( enabledValidationFeatureCount == rhs.enabledValidationFeatureCount )
&& ( pEnabledValidationFeatures == rhs.pEnabledValidationFeatures )
&& ( disabledValidationFeatureCount == rhs.disabledValidationFeatureCount )
&& ( pDisabledValidationFeatures == rhs.pDisabledValidationFeatures );
#endif
}

bool operator!=( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif

public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFeaturesEXT;
const void * pNext = {};
uint32_t enabledValidationFeatureCount = {};
const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures = {};
uint32_t disabledValidationFeatureCount = {};
const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures = {};

};

VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT ) == sizeof( VkValidationFeaturesEXT ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT>::value, "ValidationFeaturesEXT is not nothrow_move_constructible!" );

template <>
struct CppType<StructureType, StructureType::eValidationFeaturesEXT>
{
using Type = ValidationFeaturesEXT;
};
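
// Editorial usage sketch (not part of the generated header): ValidationFeaturesEXT is
// chained into InstanceCreateInfo::pNext to opt in to additional validation layer
// features at instance creation. Assumes the default `vk` namespace alias, the
// enhanced-mode ArrayProxy setters, and an already filled `applicationInfo`.
//
//   std::array<vk::ValidationFeatureEnableEXT, 2> enables = {
//     vk::ValidationFeatureEnableEXT::eGpuAssisted,
//     vk::ValidationFeatureEnableEXT::eBestPractices };
//
//   vk::ValidationFeaturesEXT validationFeatures;
//   validationFeatures.setEnabledValidationFeatures( enables );
//
//   vk::InstanceCreateInfo instanceCreateInfo( {}, &applicationInfo );
//   instanceCreateInfo.setPNext( &validationFeatures );
//   vk::UniqueInstance instance = vk::createInstanceUnique( instanceCreateInfo );
//
// When enhanced mode is disabled, the count/pointer pairs can be filled directly with
// setEnabledValidationFeatureCount() / setPEnabledValidationFeatures() instead.
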
struct ValidationFlagsEXT
|
|
{
|
|
using NativeType = VkValidationFlagsEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFlagsEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ValidationFlagsEXT(uint32_t disabledValidationCheckCount_ = {}, const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: disabledValidationCheckCount( disabledValidationCheckCount_ ), pDisabledValidationChecks( pDisabledValidationChecks_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ValidationFlagsEXT( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ValidationFlagsEXT( *reinterpret_cast<ValidationFlagsEXT const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ValidationFlagsEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationCheckEXT> const & disabledValidationChecks_ )
|
|
: disabledValidationCheckCount( static_cast<uint32_t>( disabledValidationChecks_.size() ) ), pDisabledValidationChecks( disabledValidationChecks_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ValidationFlagsEXT & operator=( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ValidationFlagsEXT & operator=( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationFlagsEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
disabledValidationCheckCount = disabledValidationCheckCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & setPDisabledValidationChecks( const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pDisabledValidationChecks = pDisabledValidationChecks_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
ValidationFlagsEXT & setDisabledValidationChecks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationCheckEXT> const & disabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
disabledValidationCheckCount = static_cast<uint32_t>( disabledValidationChecks_.size() );
|
|
pDisabledValidationChecks = disabledValidationChecks_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkValidationFlagsEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkValidationFlagsEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkValidationFlagsEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkValidationFlagsEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, disabledValidationCheckCount, pDisabledValidationChecks );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ValidationFlagsEXT const & ) const = default;
|
|
#else
|
|
bool operator==( ValidationFlagsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( disabledValidationCheckCount == rhs.disabledValidationCheckCount )
|
|
&& ( pDisabledValidationChecks == rhs.pDisabledValidationChecks );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ValidationFlagsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFlagsEXT;
|
|
const void * pNext = {};
|
|
uint32_t disabledValidationCheckCount = {};
|
|
const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ValidationFlagsEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ValidationFlagsEXT>::value, "ValidationFlagsEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eValidationFlagsEXT>
|
|
{
|
|
using Type = ValidationFlagsEXT;
|
|
};
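
// Editorial usage sketch (not part of the generated header): VK_EXT_validation_flags is
// the older mechanism superseded by ValidationFeaturesEXT and can only disable whole
// validation checks. Assumes the default `vk` namespace alias and an existing
// `instanceCreateInfo` as in the sketch above.
//
//   std::array<vk::ValidationCheckEXT, 1> disabledChecks = { vk::ValidationCheckEXT::eShaders };
//   vk::ValidationFlagsEXT validationFlags( disabledChecks );
//   instanceCreateInfo.setPNext( &validationFlags );
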
struct VertexInputAttributeDescription2EXT
|
|
{
|
|
using NativeType = VkVertexInputAttributeDescription2EXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVertexInputAttributeDescription2EXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT(uint32_t location_ = {}, uint32_t binding_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint32_t offset_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: location( location_ ), binding( binding_ ), format( format_ ), offset( offset_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT( VertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VertexInputAttributeDescription2EXT( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VertexInputAttributeDescription2EXT( *reinterpret_cast<VertexInputAttributeDescription2EXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VertexInputAttributeDescription2EXT & operator=( VertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VertexInputAttributeDescription2EXT & operator=( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
location = location_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
binding = binding_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
format = format_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
offset = offset_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVertexInputAttributeDescription2EXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVertexInputAttributeDescription2EXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVertexInputAttributeDescription2EXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVertexInputAttributeDescription2EXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Format const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, location, binding, format, offset );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VertexInputAttributeDescription2EXT const & ) const = default;
|
|
#else
|
|
bool operator==( VertexInputAttributeDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( location == rhs.location )
|
|
&& ( binding == rhs.binding )
|
|
&& ( format == rhs.format )
|
|
&& ( offset == rhs.offset );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VertexInputAttributeDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVertexInputAttributeDescription2EXT;
|
|
void * pNext = {};
|
|
uint32_t location = {};
|
|
uint32_t binding = {};
|
|
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
uint32_t offset = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT ) == sizeof( VkVertexInputAttributeDescription2EXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT>::value, "VertexInputAttributeDescription2EXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVertexInputAttributeDescription2EXT>
|
|
{
|
|
using Type = VertexInputAttributeDescription2EXT;
|
|
};
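
// Editorial usage sketch (not part of the generated header): VertexInputAttributeDescription2EXT
// mirrors VertexInputAttributeDescription for the dynamic vertex-input path of
// VK_EXT_vertex_input_dynamic_state. Assumes the default `vk` namespace alias.
//
//   vk::VertexInputAttributeDescription2EXT positionAttribute;
//   positionAttribute.setLocation( 0 )
//                    .setBinding( 0 )
//                    .setFormat( vk::Format::eR32G32B32Sfloat )
//                    .setOffset( 0 );
//
// See the sketch after VertexInputBindingDescription2EXT below for passing attribute and
// binding descriptions together to CommandBuffer::setVertexInputEXT.
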
struct VertexInputBindingDescription2EXT
|
|
{
|
|
using NativeType = VkVertexInputBindingDescription2EXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVertexInputBindingDescription2EXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VertexInputBindingDescription2EXT(uint32_t binding_ = {}, uint32_t stride_ = {}, VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex, uint32_t divisor_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: binding( binding_ ), stride( stride_ ), inputRate( inputRate_ ), divisor( divisor_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VertexInputBindingDescription2EXT( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VertexInputBindingDescription2EXT( VkVertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VertexInputBindingDescription2EXT( *reinterpret_cast<VertexInputBindingDescription2EXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VertexInputBindingDescription2EXT & operator=( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VertexInputBindingDescription2EXT & operator=( VkVertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
binding = binding_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stride = stride_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
inputRate = inputRate_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
divisor = divisor_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVertexInputBindingDescription2EXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVertexInputBindingDescription2EXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVertexInputBindingDescription2EXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVertexInputBindingDescription2EXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::VertexInputRate const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, binding, stride, inputRate, divisor );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VertexInputBindingDescription2EXT const & ) const = default;
|
|
#else
|
|
bool operator==( VertexInputBindingDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( binding == rhs.binding )
|
|
&& ( stride == rhs.stride )
|
|
&& ( inputRate == rhs.inputRate )
|
|
&& ( divisor == rhs.divisor );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VertexInputBindingDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVertexInputBindingDescription2EXT;
|
|
void * pNext = {};
|
|
uint32_t binding = {};
|
|
uint32_t stride = {};
|
|
VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex;
|
|
uint32_t divisor = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT ) == sizeof( VkVertexInputBindingDescription2EXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT>::value, "VertexInputBindingDescription2EXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVertexInputBindingDescription2EXT>
|
|
{
|
|
using Type = VertexInputBindingDescription2EXT;
|
|
};
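
// Editorial usage sketch (not part of the generated header): pairing a binding description
// with the `positionAttribute` sketched above and setting both dynamically. Assumes the
// default `vk` namespace alias and a pipeline with VK_EXT_vertex_input_dynamic_state
// enabled; with VertexInputRate::eVertex the divisor must be 1.
//
//   vk::VertexInputBindingDescription2EXT vertexBinding;
//   vertexBinding.setBinding( 0 )
//                .setStride( sizeof( float ) * 3 )
//                .setInputRate( vk::VertexInputRate::eVertex )
//                .setDivisor( 1 );
//
//   commandBuffer.setVertexInputEXT( vertexBinding, positionAttribute );
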
#if defined( VK_USE_PLATFORM_VI_NN )
|
|
struct ViSurfaceCreateInfoNN
|
|
{
|
|
using NativeType = VkViSurfaceCreateInfoNN;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eViSurfaceCreateInfoNN;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN(VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ = {}, void * window_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), window( window_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: ViSurfaceCreateInfoNN( *reinterpret_cast<ViSurfaceCreateInfoNN const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
ViSurfaceCreateInfoNN & operator=( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
ViSurfaceCreateInfoNN & operator=( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & setFlags( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & setWindow( void * window_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
window = window_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkViSurfaceCreateInfoNN const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkViSurfaceCreateInfoNN*>( this );
|
|
}
|
|
|
|
explicit operator VkViSurfaceCreateInfoNN &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkViSurfaceCreateInfoNN*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN const &, void * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, window );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( ViSurfaceCreateInfoNN const & ) const = default;
|
|
#else
|
|
bool operator==( ViSurfaceCreateInfoNN const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( window == rhs.window );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( ViSurfaceCreateInfoNN const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eViSurfaceCreateInfoNN;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags = {};
|
|
void * window = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN>::value, "ViSurfaceCreateInfoNN is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eViSurfaceCreateInfoNN>
|
|
{
|
|
using Type = ViSurfaceCreateInfoNN;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_VI_NN*/
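
// Editorial usage sketch (not part of the generated header): creating a VI surface on the
// Nintendo VI platform; only meaningful when VK_USE_PLATFORM_VI_NN is defined. Assumes the
// default `vk` namespace alias; `nativeWindow` stands in for a platform window handle.
//
//   vk::ViSurfaceCreateInfoNN surfaceCreateInfo( {}, nativeWindow );
//   vk::SurfaceKHR surface = instance.createViSurfaceNN( surfaceCreateInfo );
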
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoPictureResourceKHR
|
|
{
|
|
using NativeType = VkVideoPictureResourceKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoPictureResourceKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoPictureResourceKHR(VULKAN_HPP_NAMESPACE::Offset2D codedOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D codedExtent_ = {}, uint32_t baseArrayLayer_ = {}, VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: codedOffset( codedOffset_ ), codedExtent( codedExtent_ ), baseArrayLayer( baseArrayLayer_ ), imageViewBinding( imageViewBinding_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoPictureResourceKHR( VideoPictureResourceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoPictureResourceKHR( VkVideoPictureResourceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoPictureResourceKHR( *reinterpret_cast<VideoPictureResourceKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoPictureResourceKHR & operator=( VideoPictureResourceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoPictureResourceKHR & operator=( VkVideoPictureResourceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceKHR & setCodedOffset( VULKAN_HPP_NAMESPACE::Offset2D const & codedOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
codedOffset = codedOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceKHR & setCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & codedExtent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
codedExtent = codedExtent_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceKHR & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
baseArrayLayer = baseArrayLayer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceKHR & setImageViewBinding( VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
imageViewBinding = imageViewBinding_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoPictureResourceKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoPictureResourceKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoPictureResourceKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoPictureResourceKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageView const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, codedOffset, codedExtent, baseArrayLayer, imageViewBinding );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoPictureResourceKHR const & ) const = default;
|
|
#else
|
|
bool operator==( VideoPictureResourceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( codedOffset == rhs.codedOffset )
|
|
&& ( codedExtent == rhs.codedExtent )
|
|
&& ( baseArrayLayer == rhs.baseArrayLayer )
|
|
&& ( imageViewBinding == rhs.imageViewBinding );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoPictureResourceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoPictureResourceKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Offset2D codedOffset = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D codedExtent = {};
|
|
uint32_t baseArrayLayer = {};
|
|
VULKAN_HPP_NAMESPACE::ImageView imageViewBinding = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR ) == sizeof( VkVideoPictureResourceKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR>::value, "VideoPictureResourceKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoPictureResourceKHR>
|
|
{
|
|
using Type = VideoPictureResourceKHR;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
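
// Editorial usage sketch (not part of the generated header): VK_KHR_video_queue is a
// provisional beta extension in this header revision, so the API below may change.
// VideoPictureResourceKHR describes the image region backing a decode or encode target;
// `decodeImageView` and the coded extent are assumed values.
//
//   vk::VideoPictureResourceKHR pictureResource;
//   pictureResource.setCodedOffset( { 0, 0 } )
//                  .setCodedExtent( { 1920, 1080 } )
//                  .setBaseArrayLayer( 0 )
//                  .setImageViewBinding( decodeImageView );
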
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoReferenceSlotKHR
|
|
{
|
|
using NativeType = VkVideoReferenceSlotKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoReferenceSlotKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoReferenceSlotKHR(int8_t slotIndex_ = {}, const VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR * pPictureResource_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: slotIndex( slotIndex_ ), pPictureResource( pPictureResource_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoReferenceSlotKHR( VideoReferenceSlotKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoReferenceSlotKHR( VkVideoReferenceSlotKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoReferenceSlotKHR( *reinterpret_cast<VideoReferenceSlotKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoReferenceSlotKHR & operator=( VideoReferenceSlotKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoReferenceSlotKHR & operator=( VkVideoReferenceSlotKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotKHR & setSlotIndex( int8_t slotIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
slotIndex = slotIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotKHR & setPPictureResource( const VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR * pPictureResource_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pPictureResource = pPictureResource_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoReferenceSlotKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoReferenceSlotKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoReferenceSlotKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoReferenceSlotKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, int8_t const &, const VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, slotIndex, pPictureResource );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoReferenceSlotKHR const & ) const = default;
|
|
#else
|
|
bool operator==( VideoReferenceSlotKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( slotIndex == rhs.slotIndex )
|
|
&& ( pPictureResource == rhs.pPictureResource );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoReferenceSlotKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoReferenceSlotKHR;
|
|
const void * pNext = {};
|
|
int8_t slotIndex = {};
|
|
const VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR * pPictureResource = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR ) == sizeof( VkVideoReferenceSlotKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR>::value, "VideoReferenceSlotKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoReferenceSlotKHR>
|
|
{
|
|
using Type = VideoReferenceSlotKHR;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
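
// Editorial usage sketch (not part of the generated header, provisional beta API): a
// reference slot associates a DPB slot index with the `pictureResource` sketched above.
//
//   vk::VideoReferenceSlotKHR referenceSlot;
//   referenceSlot.setSlotIndex( 0 )
//                .setPPictureResource( &pictureResource );
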
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoBeginCodingInfoKHR
|
|
{
|
|
using NativeType = VkVideoBeginCodingInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoBeginCodingInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR(VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset_ = {}, VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ = {}, VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ = {}, uint32_t referenceSlotCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), codecQualityPreset( codecQualityPreset_ ), videoSession( videoSession_ ), videoSessionParameters( videoSessionParameters_ ), referenceSlotCount( referenceSlotCount_ ), pReferenceSlots( pReferenceSlots_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR( VideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoBeginCodingInfoKHR( VkVideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoBeginCodingInfoKHR( *reinterpret_cast<VideoBeginCodingInfoKHR const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
VideoBeginCodingInfoKHR( VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_, VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset_, VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_, VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR> const & referenceSlots_ )
|
|
: flags( flags_ ), codecQualityPreset( codecQualityPreset_ ), videoSession( videoSession_ ), videoSessionParameters( videoSessionParameters_ ), referenceSlotCount( static_cast<uint32_t>( referenceSlots_.size() ) ), pReferenceSlots( referenceSlots_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoBeginCodingInfoKHR & operator=( VideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoBeginCodingInfoKHR & operator=( VkVideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setCodecQualityPreset( VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
codecQualityPreset = codecQualityPreset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setVideoSession( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
videoSession = videoSession_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setVideoSessionParameters( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
videoSessionParameters = videoSessionParameters_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
referenceSlotCount = referenceSlotCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pReferenceSlots = pReferenceSlots_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
VideoBeginCodingInfoKHR & setReferenceSlots( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR> const & referenceSlots_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
referenceSlotCount = static_cast<uint32_t>( referenceSlots_.size() );
|
|
pReferenceSlots = referenceSlots_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoBeginCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoBeginCodingInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoBeginCodingInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoBeginCodingInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR const &, VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR const &, VULKAN_HPP_NAMESPACE::VideoSessionKHR const &, VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, codecQualityPreset, videoSession, videoSessionParameters, referenceSlotCount, pReferenceSlots );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoBeginCodingInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( VideoBeginCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( codecQualityPreset == rhs.codecQualityPreset )
|
|
&& ( videoSession == rhs.videoSession )
|
|
&& ( videoSessionParameters == rhs.videoSessionParameters )
|
|
&& ( referenceSlotCount == rhs.referenceSlotCount )
|
|
&& ( pReferenceSlots == rhs.pReferenceSlots );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoBeginCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoBeginCodingInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags = {};
|
|
VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset = {};
|
|
VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession = {};
|
|
VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters = {};
|
|
uint32_t referenceSlotCount = {};
|
|
const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR ) == sizeof( VkVideoBeginCodingInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR>::value, "VideoBeginCodingInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoBeginCodingInfoKHR>
|
|
{
|
|
using Type = VideoBeginCodingInfoKHR;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
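
// Editorial usage sketch (not part of the generated header, provisional beta API): opening
// and closing a video coding scope on a command buffer from a video-capable queue family.
// `codecQualityPreset`, `videoSession`, `sessionParameters` and `referenceSlots` are
// assumed to exist.
//
//   vk::VideoBeginCodingInfoKHR beginInfo( {}, codecQualityPreset, videoSession,
//                                          sessionParameters, referenceSlots );
//   commandBuffer.beginVideoCodingKHR( beginInfo );
//   // ... record video decode / encode commands ...
//   commandBuffer.endVideoCodingKHR( vk::VideoEndCodingInfoKHR() );
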
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoBindMemoryKHR
|
|
{
|
|
using NativeType = VkVideoBindMemoryKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoBindMemoryKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoBindMemoryKHR(uint32_t memoryBindIndex_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memorySize_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: memoryBindIndex( memoryBindIndex_ ), memory( memory_ ), memoryOffset( memoryOffset_ ), memorySize( memorySize_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoBindMemoryKHR( VideoBindMemoryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoBindMemoryKHR( VkVideoBindMemoryKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoBindMemoryKHR( *reinterpret_cast<VideoBindMemoryKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoBindMemoryKHR & operator=( VideoBindMemoryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoBindMemoryKHR & operator=( VkVideoBindMemoryKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoBindMemoryKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoBindMemoryKHR & setMemoryBindIndex( uint32_t memoryBindIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memoryBindIndex = memoryBindIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoBindMemoryKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memory = memory_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoBindMemoryKHR & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memoryOffset = memoryOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoBindMemoryKHR & setMemorySize( VULKAN_HPP_NAMESPACE::DeviceSize memorySize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memorySize = memorySize_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoBindMemoryKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoBindMemoryKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoBindMemoryKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoBindMemoryKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, memoryBindIndex, memory, memoryOffset, memorySize );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoBindMemoryKHR const & ) const = default;
|
|
#else
|
|
bool operator==( VideoBindMemoryKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( memoryBindIndex == rhs.memoryBindIndex )
|
|
&& ( memory == rhs.memory )
|
|
&& ( memoryOffset == rhs.memoryOffset )
|
|
&& ( memorySize == rhs.memorySize );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoBindMemoryKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoBindMemoryKHR;
|
|
const void * pNext = {};
|
|
uint32_t memoryBindIndex = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize memorySize = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR ) == sizeof( VkVideoBindMemoryKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR>::value, "VideoBindMemoryKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoBindMemoryKHR>
|
|
{
|
|
using Type = VideoBindMemoryKHR;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
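
// Editorial usage sketch (not part of the generated header, provisional beta API): one
// VideoBindMemoryKHR per memory binding reported for the video session; the bind index,
// offset and size are taken from the session's memory requirements query. `sessionMemory`,
// `sessionMemorySize` and `videoSession` are assumed handles/values.
//
//   vk::VideoBindMemoryKHR bindInfo( /*memoryBindIndex=*/0, sessionMemory,
//                                    /*memoryOffset=*/0, sessionMemorySize );
//   device.bindVideoSessionMemoryKHR( videoSession, bindInfo );
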
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoCapabilitiesKHR
|
|
{
|
|
using NativeType = VkVideoCapabilitiesKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoCapabilitiesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoCapabilitiesKHR(VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR capabilityFlags_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferSizeAlignment_ = {}, VULKAN_HPP_NAMESPACE::Extent2D videoPictureExtentGranularity_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxExtent_ = {}, uint32_t maxReferencePicturesSlotsCount_ = {}, uint32_t maxReferencePicturesActiveCount_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: capabilityFlags( capabilityFlags_ ), minBitstreamBufferOffsetAlignment( minBitstreamBufferOffsetAlignment_ ), minBitstreamBufferSizeAlignment( minBitstreamBufferSizeAlignment_ ), videoPictureExtentGranularity( videoPictureExtentGranularity_ ), minExtent( minExtent_ ), maxExtent( maxExtent_ ), maxReferencePicturesSlotsCount( maxReferencePicturesSlotsCount_ ), maxReferencePicturesActiveCount( maxReferencePicturesActiveCount_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoCapabilitiesKHR( VideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoCapabilitiesKHR( VkVideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoCapabilitiesKHR( *reinterpret_cast<VideoCapabilitiesKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoCapabilitiesKHR & operator=( VideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoCapabilitiesKHR & operator=( VkVideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkVideoCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoCapabilitiesKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoCapabilitiesKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, capabilityFlags, minBitstreamBufferOffsetAlignment, minBitstreamBufferSizeAlignment, videoPictureExtentGranularity, minExtent, maxExtent, maxReferencePicturesSlotsCount, maxReferencePicturesActiveCount );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoCapabilitiesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( VideoCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( capabilityFlags == rhs.capabilityFlags )
|
|
&& ( minBitstreamBufferOffsetAlignment == rhs.minBitstreamBufferOffsetAlignment )
|
|
&& ( minBitstreamBufferSizeAlignment == rhs.minBitstreamBufferSizeAlignment )
|
|
&& ( videoPictureExtentGranularity == rhs.videoPictureExtentGranularity )
|
|
&& ( minExtent == rhs.minExtent )
|
|
&& ( maxExtent == rhs.maxExtent )
|
|
&& ( maxReferencePicturesSlotsCount == rhs.maxReferencePicturesSlotsCount )
|
|
&& ( maxReferencePicturesActiveCount == rhs.maxReferencePicturesActiveCount );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoCapabilitiesKHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR capabilityFlags = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferOffsetAlignment = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferSizeAlignment = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D videoPictureExtentGranularity = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D minExtent = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D maxExtent = {};
|
|
uint32_t maxReferencePicturesSlotsCount = {};
|
|
uint32_t maxReferencePicturesActiveCount = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR ) == sizeof( VkVideoCapabilitiesKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::value, "VideoCapabilitiesKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoCapabilitiesKHR>
|
|
{
|
|
using Type = VideoCapabilitiesKHR;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
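
// Editorial usage sketch (not part of the generated header, provisional beta API):
// VideoCapabilitiesKHR is an output structure filled by
// PhysicalDevice::getVideoCapabilitiesKHR for a given video profile; `videoProfile` and
// `bitstreamSize` are assumed values.
//
//   vk::VideoCapabilitiesKHR capabilities = physicalDevice.getVideoCapabilitiesKHR( videoProfile );
//   assert( bitstreamSize % capabilities.minBitstreamBufferSizeAlignment == 0 );
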
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoCodingControlInfoKHR
|
|
{
|
|
using NativeType = VkVideoCodingControlInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoCodingControlInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoCodingControlInfoKHR(VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoCodingControlInfoKHR( VideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoCodingControlInfoKHR( VkVideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoCodingControlInfoKHR( *reinterpret_cast<VideoCodingControlInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoCodingControlInfoKHR & operator=( VideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoCodingControlInfoKHR & operator=( VkVideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoCodingControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoCodingControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoCodingControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoCodingControlInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoCodingControlInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoCodingControlInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoCodingControlInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( VideoCodingControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoCodingControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoCodingControlInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR ) == sizeof( VkVideoCodingControlInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR>::value, "VideoCodingControlInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoCodingControlInfoKHR>
|
|
{
|
|
using Type = VideoCodingControlInfoKHR;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
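
// Editorial usage sketch (not part of the generated header, provisional beta API):
// resetting a video session before its first use inside a coding scope.
//
//   vk::VideoCodingControlInfoKHR controlInfo( vk::VideoCodingControlFlagBitsKHR::eReset );
//   commandBuffer.controlVideoCodingKHR( controlInfo );
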
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoDecodeCapabilitiesKHR
|
|
{
|
|
using NativeType = VkVideoDecodeCapabilitiesKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeCapabilitiesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoDecodeCapabilitiesKHR(VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR flags_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoDecodeCapabilitiesKHR( VideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoDecodeCapabilitiesKHR( VkVideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoDecodeCapabilitiesKHR( *reinterpret_cast<VideoDecodeCapabilitiesKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoDecodeCapabilitiesKHR & operator=( VideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoDecodeCapabilitiesKHR & operator=( VkVideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkVideoDecodeCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoDecodeCapabilitiesKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoDecodeCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoDecodeCapabilitiesKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoDecodeCapabilitiesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( VideoDecodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoDecodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeCapabilitiesKHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR flags = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR ) == sizeof( VkVideoDecodeCapabilitiesKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR>::value, "VideoDecodeCapabilitiesKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoDecodeCapabilitiesKHR>
|
|
{
|
|
using Type = VideoDecodeCapabilitiesKHR;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
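
// Editorial usage sketch (not part of the generated header, provisional beta API): decode
// capabilities are returned by chaining this structure behind VideoCapabilitiesKHR in the
// capability query sketched above.
//
//   auto chain = physicalDevice.getVideoCapabilitiesKHR<vk::VideoCapabilitiesKHR,
//                                                       vk::VideoDecodeCapabilitiesKHR>( videoProfile );
//   vk::VideoDecodeCapabilityFlagsKHR decodeFlags = chain.get<vk::VideoDecodeCapabilitiesKHR>().flags;
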
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoDecodeH264CapabilitiesEXT
|
|
{
|
|
using NativeType = VkVideoDecodeH264CapabilitiesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264CapabilitiesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264CapabilitiesEXT(uint32_t maxLevel_ = {}, VULKAN_HPP_NAMESPACE::Offset2D fieldOffsetGranularity_ = {}, VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: maxLevel( maxLevel_ ), fieldOffsetGranularity( fieldOffsetGranularity_ ), stdExtensionVersion( stdExtensionVersion_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264CapabilitiesEXT( VideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoDecodeH264CapabilitiesEXT( VkVideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoDecodeH264CapabilitiesEXT( *reinterpret_cast<VideoDecodeH264CapabilitiesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoDecodeH264CapabilitiesEXT & operator=( VideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoDecodeH264CapabilitiesEXT & operator=( VkVideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkVideoDecodeH264CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoDecodeH264CapabilitiesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoDecodeH264CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoDecodeH264CapabilitiesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::ExtensionProperties const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maxLevel, fieldOffsetGranularity, stdExtensionVersion );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoDecodeH264CapabilitiesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( VideoDecodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( maxLevel == rhs.maxLevel )
|
|
&& ( fieldOffsetGranularity == rhs.fieldOffsetGranularity )
|
|
&& ( stdExtensionVersion == rhs.stdExtensionVersion );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoDecodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264CapabilitiesEXT;
|
|
void * pNext = {};
|
|
uint32_t maxLevel = {};
|
|
VULKAN_HPP_NAMESPACE::Offset2D fieldOffsetGranularity = {};
|
|
VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesEXT ) == sizeof( VkVideoDecodeH264CapabilitiesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesEXT>::value, "VideoDecodeH264CapabilitiesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoDecodeH264CapabilitiesEXT>
|
|
{
|
|
using Type = VideoDecodeH264CapabilitiesEXT;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
|
|
|
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoDecodeH264DpbSlotInfoEXT
|
|
{
|
|
using NativeType = VkVideoDecodeH264DpbSlotInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264DpbSlotInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoDecodeH264DpbSlotInfoEXT(const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: pStdReferenceInfo( pStdReferenceInfo_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoDecodeH264DpbSlotInfoEXT( VideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoDecodeH264DpbSlotInfoEXT( VkVideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoDecodeH264DpbSlotInfoEXT( *reinterpret_cast<VideoDecodeH264DpbSlotInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoDecodeH264DpbSlotInfoEXT & operator=( VideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoDecodeH264DpbSlotInfoEXT & operator=( VkVideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264DpbSlotInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264DpbSlotInfoEXT & setPStdReferenceInfo( const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pStdReferenceInfo = pStdReferenceInfo_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoDecodeH264DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoDecodeH264DpbSlotInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoDecodeH264DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoDecodeH264DpbSlotInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoDecodeH264ReferenceInfo * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pStdReferenceInfo );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoDecodeH264DpbSlotInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( VideoDecodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( pStdReferenceInfo == rhs.pStdReferenceInfo );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoDecodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264DpbSlotInfoEXT;
|
|
const void * pNext = {};
|
|
const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoEXT ) == sizeof( VkVideoDecodeH264DpbSlotInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoEXT>::value, "VideoDecodeH264DpbSlotInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoDecodeH264DpbSlotInfoEXT>
|
|
{
|
|
using Type = VideoDecodeH264DpbSlotInfoEXT;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
|
|
|
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoDecodeH264MvcEXT
|
|
{
|
|
using NativeType = VkVideoDecodeH264MvcEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264MvcEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoDecodeH264MvcEXT(const StdVideoDecodeH264Mvc * pStdMvc_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: pStdMvc( pStdMvc_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoDecodeH264MvcEXT( VideoDecodeH264MvcEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoDecodeH264MvcEXT( VkVideoDecodeH264MvcEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoDecodeH264MvcEXT( *reinterpret_cast<VideoDecodeH264MvcEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoDecodeH264MvcEXT & operator=( VideoDecodeH264MvcEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoDecodeH264MvcEXT & operator=( VkVideoDecodeH264MvcEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264MvcEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264MvcEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264MvcEXT & setPStdMvc( const StdVideoDecodeH264Mvc * pStdMvc_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pStdMvc = pStdMvc_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoDecodeH264MvcEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoDecodeH264MvcEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoDecodeH264MvcEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoDecodeH264MvcEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoDecodeH264Mvc * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pStdMvc );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoDecodeH264MvcEXT const & ) const = default;
|
|
#else
|
|
bool operator==( VideoDecodeH264MvcEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( pStdMvc == rhs.pStdMvc );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoDecodeH264MvcEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264MvcEXT;
|
|
const void * pNext = {};
|
|
const StdVideoDecodeH264Mvc * pStdMvc = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264MvcEXT ) == sizeof( VkVideoDecodeH264MvcEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264MvcEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264MvcEXT>::value, "VideoDecodeH264MvcEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoDecodeH264MvcEXT>
|
|
{
|
|
using Type = VideoDecodeH264MvcEXT;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
|
|
|
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoDecodeH264PictureInfoEXT
|
|
{
|
|
using NativeType = VkVideoDecodeH264PictureInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264PictureInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureInfoEXT(const StdVideoDecodeH264PictureInfo * pStdPictureInfo_ = {}, uint32_t slicesCount_ = {}, const uint32_t * pSlicesDataOffsets_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: pStdPictureInfo( pStdPictureInfo_ ), slicesCount( slicesCount_ ), pSlicesDataOffsets( pSlicesDataOffsets_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureInfoEXT( VideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoDecodeH264PictureInfoEXT( VkVideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoDecodeH264PictureInfoEXT( *reinterpret_cast<VideoDecodeH264PictureInfoEXT const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
VideoDecodeH264PictureInfoEXT( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & slicesDataOffsets_ )
|
|
: pStdPictureInfo( pStdPictureInfo_ ), slicesCount( static_cast<uint32_t>( slicesDataOffsets_.size() ) ), pSlicesDataOffsets( slicesDataOffsets_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoDecodeH264PictureInfoEXT & operator=( VideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoDecodeH264PictureInfoEXT & operator=( VkVideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoEXT & setPStdPictureInfo( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pStdPictureInfo = pStdPictureInfo_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoEXT & setSlicesCount( uint32_t slicesCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
slicesCount = slicesCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoEXT & setPSlicesDataOffsets( const uint32_t * pSlicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSlicesDataOffsets = pSlicesDataOffsets_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
VideoDecodeH264PictureInfoEXT & setSlicesDataOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & slicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
slicesCount = static_cast<uint32_t>( slicesDataOffsets_.size() );
|
|
pSlicesDataOffsets = slicesDataOffsets_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoDecodeH264PictureInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoDecodeH264PictureInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoDecodeH264PictureInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoDecodeH264PictureInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoDecodeH264PictureInfo * const &, uint32_t const &, const uint32_t * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pStdPictureInfo, slicesCount, pSlicesDataOffsets );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoDecodeH264PictureInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( VideoDecodeH264PictureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( pStdPictureInfo == rhs.pStdPictureInfo )
|
|
&& ( slicesCount == rhs.slicesCount )
|
|
&& ( pSlicesDataOffsets == rhs.pSlicesDataOffsets );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoDecodeH264PictureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264PictureInfoEXT;
|
|
const void * pNext = {};
|
|
const StdVideoDecodeH264PictureInfo * pStdPictureInfo = {};
|
|
uint32_t slicesCount = {};
|
|
const uint32_t * pSlicesDataOffsets = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoEXT ) == sizeof( VkVideoDecodeH264PictureInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoEXT>::value, "VideoDecodeH264PictureInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoDecodeH264PictureInfoEXT>
|
|
{
|
|
using Type = VideoDecodeH264PictureInfoEXT;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
|
|
|
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoDecodeH264ProfileEXT
|
|
{
|
|
using NativeType = VkVideoDecodeH264ProfileEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264ProfileEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoDecodeH264ProfileEXT(StdVideoH264ProfileIdc stdProfileIdc_ = {}, VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagsEXT pictureLayout_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: stdProfileIdc( stdProfileIdc_ ), pictureLayout( pictureLayout_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoDecodeH264ProfileEXT( VideoDecodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoDecodeH264ProfileEXT( VkVideoDecodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoDecodeH264ProfileEXT( *reinterpret_cast<VideoDecodeH264ProfileEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoDecodeH264ProfileEXT & operator=( VideoDecodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoDecodeH264ProfileEXT & operator=( VkVideoDecodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileEXT & setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stdProfileIdc = stdProfileIdc_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileEXT & setPictureLayout( VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagsEXT pictureLayout_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pictureLayout = pictureLayout_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoDecodeH264ProfileEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoDecodeH264ProfileEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoDecodeH264ProfileEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoDecodeH264ProfileEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, StdVideoH264ProfileIdc const &, VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagsEXT const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, stdProfileIdc, pictureLayout );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
std::strong_ordering operator<=>( VideoDecodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
|
|
if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
if ( auto cmp = pictureLayout <=> rhs.pictureLayout; cmp != 0 ) return cmp;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( VideoDecodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 )
|
|
&& ( pictureLayout == rhs.pictureLayout );
|
|
}
|
|
|
|
bool operator!=( VideoDecodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264ProfileEXT;
|
|
const void * pNext = {};
|
|
StdVideoH264ProfileIdc stdProfileIdc = {};
|
|
VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagsEXT pictureLayout = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileEXT ) == sizeof( VkVideoDecodeH264ProfileEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileEXT>::value, "VideoDecodeH264ProfileEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoDecodeH264ProfileEXT>
|
|
{
|
|
using Type = VideoDecodeH264ProfileEXT;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
|
|
|
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoDecodeH264SessionCreateInfoEXT
|
|
{
|
|
using NativeType = VkVideoDecodeH264SessionCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264SessionCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionCreateInfoEXT(VULKAN_HPP_NAMESPACE::VideoDecodeH264CreateFlagsEXT flags_ = {}, const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), pStdExtensionVersion( pStdExtensionVersion_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionCreateInfoEXT( VideoDecodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoDecodeH264SessionCreateInfoEXT( VkVideoDecodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoDecodeH264SessionCreateInfoEXT( *reinterpret_cast<VideoDecodeH264SessionCreateInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoDecodeH264SessionCreateInfoEXT & operator=( VideoDecodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoDecodeH264SessionCreateInfoEXT & operator=( VkVideoDecodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::VideoDecodeH264CreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionCreateInfoEXT & setPStdExtensionVersion( const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pStdExtensionVersion = pStdExtensionVersion_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoDecodeH264SessionCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoDecodeH264SessionCreateInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoDecodeH264SessionCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoDecodeH264SessionCreateInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoDecodeH264CreateFlagsEXT const &, const VULKAN_HPP_NAMESPACE::ExtensionProperties * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, pStdExtensionVersion );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoDecodeH264SessionCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( VideoDecodeH264SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( pStdExtensionVersion == rhs.pStdExtensionVersion );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoDecodeH264SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionCreateInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::VideoDecodeH264CreateFlagsEXT flags = {};
|
|
const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionCreateInfoEXT ) == sizeof( VkVideoDecodeH264SessionCreateInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionCreateInfoEXT>::value, "VideoDecodeH264SessionCreateInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoDecodeH264SessionCreateInfoEXT>
|
|
{
|
|
using Type = VideoDecodeH264SessionCreateInfoEXT;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
|
|
|
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoDecodeH264SessionParametersAddInfoEXT
|
|
{
|
|
using NativeType = VkVideoDecodeH264SessionParametersAddInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264SessionParametersAddInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoEXT(uint32_t spsStdCount_ = {}, const StdVideoH264SequenceParameterSet * pSpsStd_ = {}, uint32_t ppsStdCount_ = {}, const StdVideoH264PictureParameterSet * pPpsStd_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: spsStdCount( spsStdCount_ ), pSpsStd( pSpsStd_ ), ppsStdCount( ppsStdCount_ ), pPpsStd( pPpsStd_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoEXT( VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoDecodeH264SessionParametersAddInfoEXT( VkVideoDecodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoDecodeH264SessionParametersAddInfoEXT( *reinterpret_cast<VideoDecodeH264SessionParametersAddInfoEXT const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
VideoDecodeH264SessionParametersAddInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & spsStd_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & ppsStd_ = {} )
|
|
: spsStdCount( static_cast<uint32_t>( spsStd_.size() ) ), pSpsStd( spsStd_.data() ), ppsStdCount( static_cast<uint32_t>( ppsStd_.size() ) ), pPpsStd( ppsStd_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoDecodeH264SessionParametersAddInfoEXT & operator=( VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoDecodeH264SessionParametersAddInfoEXT & operator=( VkVideoDecodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT & setSpsStdCount( uint32_t spsStdCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
spsStdCount = spsStdCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT & setPSpsStd( const StdVideoH264SequenceParameterSet * pSpsStd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSpsStd = pSpsStd_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
VideoDecodeH264SessionParametersAddInfoEXT & setSpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & spsStd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
spsStdCount = static_cast<uint32_t>( spsStd_.size() );
|
|
pSpsStd = spsStd_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT & setPpsStdCount( uint32_t ppsStdCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
ppsStdCount = ppsStdCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT & setPPpsStd( const StdVideoH264PictureParameterSet * pPpsStd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pPpsStd = pPpsStd_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
VideoDecodeH264SessionParametersAddInfoEXT & setPpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & ppsStd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
ppsStdCount = static_cast<uint32_t>( ppsStd_.size() );
|
|
pPpsStd = ppsStd_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoDecodeH264SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoDecodeH264SessionParametersAddInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoDecodeH264SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoDecodeH264SessionParametersAddInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const StdVideoH264SequenceParameterSet * const &, uint32_t const &, const StdVideoH264PictureParameterSet * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, spsStdCount, pSpsStd, ppsStdCount, pPpsStd );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoDecodeH264SessionParametersAddInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( spsStdCount == rhs.spsStdCount )
|
|
&& ( pSpsStd == rhs.pSpsStd )
|
|
&& ( ppsStdCount == rhs.ppsStdCount )
|
|
&& ( pPpsStd == rhs.pPpsStd );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionParametersAddInfoEXT;
|
|
const void * pNext = {};
|
|
uint32_t spsStdCount = {};
|
|
const StdVideoH264SequenceParameterSet * pSpsStd = {};
|
|
uint32_t ppsStdCount = {};
|
|
const StdVideoH264PictureParameterSet * pPpsStd = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT ) == sizeof( VkVideoDecodeH264SessionParametersAddInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT>::value, "VideoDecodeH264SessionParametersAddInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoDecodeH264SessionParametersAddInfoEXT>
|
|
{
|
|
using Type = VideoDecodeH264SessionParametersAddInfoEXT;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
|
|
|
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoDecodeH264SessionParametersCreateInfoEXT
|
|
{
|
|
using NativeType = VkVideoDecodeH264SessionParametersCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersCreateInfoEXT(uint32_t maxSpsStdCount_ = {}, uint32_t maxPpsStdCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: maxSpsStdCount( maxSpsStdCount_ ), maxPpsStdCount( maxPpsStdCount_ ), pParametersAddInfo( pParametersAddInfo_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersCreateInfoEXT( VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoDecodeH264SessionParametersCreateInfoEXT( VkVideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoDecodeH264SessionParametersCreateInfoEXT( *reinterpret_cast<VideoDecodeH264SessionParametersCreateInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoDecodeH264SessionParametersCreateInfoEXT & operator=( VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoDecodeH264SessionParametersCreateInfoEXT & operator=( VkVideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoEXT & setMaxSpsStdCount( uint32_t maxSpsStdCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxSpsStdCount = maxSpsStdCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoEXT & setMaxPpsStdCount( uint32_t maxPpsStdCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxPpsStdCount = maxPpsStdCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoEXT & setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pParametersAddInfo = pParametersAddInfo_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoDecodeH264SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoDecodeH264SessionParametersCreateInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoDecodeH264SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoDecodeH264SessionParametersCreateInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maxSpsStdCount, maxPpsStdCount, pParametersAddInfo );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoDecodeH264SessionParametersCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( maxSpsStdCount == rhs.maxSpsStdCount )
|
|
&& ( maxPpsStdCount == rhs.maxPpsStdCount )
|
|
&& ( pParametersAddInfo == rhs.pParametersAddInfo );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT;
|
|
const void * pNext = {};
|
|
uint32_t maxSpsStdCount = {};
|
|
uint32_t maxPpsStdCount = {};
|
|
const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT * pParametersAddInfo = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoEXT ) == sizeof( VkVideoDecodeH264SessionParametersCreateInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoEXT>::value, "VideoDecodeH264SessionParametersCreateInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT>
|
|
{
|
|
using Type = VideoDecodeH264SessionParametersCreateInfoEXT;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
|
|
|
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoDecodeH265CapabilitiesEXT
|
|
{
|
|
using NativeType = VkVideoDecodeH265CapabilitiesEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265CapabilitiesEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265CapabilitiesEXT(uint32_t maxLevel_ = {}, VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: maxLevel( maxLevel_ ), stdExtensionVersion( stdExtensionVersion_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265CapabilitiesEXT( VideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoDecodeH265CapabilitiesEXT( VkVideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoDecodeH265CapabilitiesEXT( *reinterpret_cast<VideoDecodeH265CapabilitiesEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoDecodeH265CapabilitiesEXT & operator=( VideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoDecodeH265CapabilitiesEXT & operator=( VkVideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkVideoDecodeH265CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoDecodeH265CapabilitiesEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoDecodeH265CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoDecodeH265CapabilitiesEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ExtensionProperties const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maxLevel, stdExtensionVersion );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoDecodeH265CapabilitiesEXT const & ) const = default;
|
|
#else
|
|
bool operator==( VideoDecodeH265CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( maxLevel == rhs.maxLevel )
|
|
&& ( stdExtensionVersion == rhs.stdExtensionVersion );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoDecodeH265CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265CapabilitiesEXT;
|
|
void * pNext = {};
|
|
uint32_t maxLevel = {};
|
|
VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesEXT ) == sizeof( VkVideoDecodeH265CapabilitiesEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesEXT>::value, "VideoDecodeH265CapabilitiesEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoDecodeH265CapabilitiesEXT>
|
|
{
|
|
using Type = VideoDecodeH265CapabilitiesEXT;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
|
|
|
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoDecodeH265DpbSlotInfoEXT
|
|
{
|
|
using NativeType = VkVideoDecodeH265DpbSlotInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265DpbSlotInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoDecodeH265DpbSlotInfoEXT(const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: pStdReferenceInfo( pStdReferenceInfo_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoDecodeH265DpbSlotInfoEXT( VideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoDecodeH265DpbSlotInfoEXT( VkVideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoDecodeH265DpbSlotInfoEXT( *reinterpret_cast<VideoDecodeH265DpbSlotInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoDecodeH265DpbSlotInfoEXT & operator=( VideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoDecodeH265DpbSlotInfoEXT & operator=( VkVideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265DpbSlotInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265DpbSlotInfoEXT & setPStdReferenceInfo( const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pStdReferenceInfo = pStdReferenceInfo_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoDecodeH265DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoDecodeH265DpbSlotInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoDecodeH265DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoDecodeH265DpbSlotInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoDecodeH265ReferenceInfo * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pStdReferenceInfo );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoDecodeH265DpbSlotInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( VideoDecodeH265DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( pStdReferenceInfo == rhs.pStdReferenceInfo );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoDecodeH265DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265DpbSlotInfoEXT;
|
|
const void * pNext = {};
|
|
const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoEXT ) == sizeof( VkVideoDecodeH265DpbSlotInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoEXT>::value, "VideoDecodeH265DpbSlotInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoDecodeH265DpbSlotInfoEXT>
|
|
{
|
|
using Type = VideoDecodeH265DpbSlotInfoEXT;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
|
|
|
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoDecodeH265PictureInfoEXT
|
|
{
|
|
using NativeType = VkVideoDecodeH265PictureInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265PictureInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoDecodeH265PictureInfoEXT(StdVideoDecodeH265PictureInfo * pStdPictureInfo_ = {}, uint32_t slicesCount_ = {}, const uint32_t * pSlicesDataOffsets_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: pStdPictureInfo( pStdPictureInfo_ ), slicesCount( slicesCount_ ), pSlicesDataOffsets( pSlicesDataOffsets_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoDecodeH265PictureInfoEXT( VideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoDecodeH265PictureInfoEXT( VkVideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoDecodeH265PictureInfoEXT( *reinterpret_cast<VideoDecodeH265PictureInfoEXT const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
VideoDecodeH265PictureInfoEXT( StdVideoDecodeH265PictureInfo * pStdPictureInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & slicesDataOffsets_ )
|
|
: pStdPictureInfo( pStdPictureInfo_ ), slicesCount( static_cast<uint32_t>( slicesDataOffsets_.size() ) ), pSlicesDataOffsets( slicesDataOffsets_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoDecodeH265PictureInfoEXT & operator=( VideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoDecodeH265PictureInfoEXT & operator=( VkVideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoEXT & setPStdPictureInfo( StdVideoDecodeH265PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pStdPictureInfo = pStdPictureInfo_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoEXT & setSlicesCount( uint32_t slicesCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
slicesCount = slicesCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoEXT & setPSlicesDataOffsets( const uint32_t * pSlicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSlicesDataOffsets = pSlicesDataOffsets_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
VideoDecodeH265PictureInfoEXT & setSlicesDataOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & slicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
slicesCount = static_cast<uint32_t>( slicesDataOffsets_.size() );
|
|
pSlicesDataOffsets = slicesDataOffsets_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoDecodeH265PictureInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoDecodeH265PictureInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoDecodeH265PictureInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoDecodeH265PictureInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, StdVideoDecodeH265PictureInfo * const &, uint32_t const &, const uint32_t * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pStdPictureInfo, slicesCount, pSlicesDataOffsets );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoDecodeH265PictureInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( VideoDecodeH265PictureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( pStdPictureInfo == rhs.pStdPictureInfo )
|
|
&& ( slicesCount == rhs.slicesCount )
|
|
&& ( pSlicesDataOffsets == rhs.pSlicesDataOffsets );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoDecodeH265PictureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265PictureInfoEXT;
|
|
const void * pNext = {};
|
|
StdVideoDecodeH265PictureInfo * pStdPictureInfo = {};
|
|
uint32_t slicesCount = {};
|
|
const uint32_t * pSlicesDataOffsets = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoEXT ) == sizeof( VkVideoDecodeH265PictureInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoEXT>::value, "VideoDecodeH265PictureInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoDecodeH265PictureInfoEXT>
|
|
{
|
|
using Type = VideoDecodeH265PictureInfoEXT;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
|
|
|
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoDecodeH265ProfileEXT
|
|
{
|
|
using NativeType = VkVideoDecodeH265ProfileEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265ProfileEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoDecodeH265ProfileEXT(StdVideoH265ProfileIdc stdProfileIdc_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: stdProfileIdc( stdProfileIdc_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoDecodeH265ProfileEXT( VideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoDecodeH265ProfileEXT( VkVideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoDecodeH265ProfileEXT( *reinterpret_cast<VideoDecodeH265ProfileEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoDecodeH265ProfileEXT & operator=( VideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoDecodeH265ProfileEXT & operator=( VkVideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileEXT & setStdProfileIdc( StdVideoH265ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stdProfileIdc = stdProfileIdc_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoDecodeH265ProfileEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoDecodeH265ProfileEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoDecodeH265ProfileEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoDecodeH265ProfileEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, StdVideoH265ProfileIdc const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, stdProfileIdc );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
std::strong_ordering operator<=>( VideoDecodeH265ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
|
|
if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( VideoDecodeH265ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ) == 0 );
|
|
}
|
|
|
|
bool operator!=( VideoDecodeH265ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265ProfileEXT;
|
|
const void * pNext = {};
|
|
StdVideoH265ProfileIdc stdProfileIdc = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileEXT ) == sizeof( VkVideoDecodeH265ProfileEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileEXT>::value, "VideoDecodeH265ProfileEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoDecodeH265ProfileEXT>
|
|
{
|
|
using Type = VideoDecodeH265ProfileEXT;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
|
|
|
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoDecodeH265SessionCreateInfoEXT
  {
    using NativeType = VkVideoDecodeH265SessionCreateInfoEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265SessionCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionCreateInfoEXT(VULKAN_HPP_NAMESPACE::VideoDecodeH265CreateFlagsEXT flags_ = {}, const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ = {}) VULKAN_HPP_NOEXCEPT
      : flags( flags_ ), pStdExtensionVersion( pStdExtensionVersion_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionCreateInfoEXT( VideoDecodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH265SessionCreateInfoEXT( VkVideoDecodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH265SessionCreateInfoEXT( *reinterpret_cast<VideoDecodeH265SessionCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoDecodeH265SessionCreateInfoEXT & operator=( VideoDecodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH265SessionCreateInfoEXT & operator=( VkVideoDecodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::VideoDecodeH265CreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionCreateInfoEXT & setPStdExtensionVersion( const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdExtensionVersion = pStdExtensionVersion_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkVideoDecodeH265SessionCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH265SessionCreateInfoEXT*>( this );
    }

    explicit operator VkVideoDecodeH265SessionCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH265SessionCreateInfoEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoDecodeH265CreateFlagsEXT const &, const VULKAN_HPP_NAMESPACE::ExtensionProperties * const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, pStdExtensionVersion );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( VideoDecodeH265SessionCreateInfoEXT const & ) const = default;
#else
    bool operator==( VideoDecodeH265SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags )
          && ( pStdExtensionVersion == rhs.pStdExtensionVersion );
#endif
    }

    bool operator!=( VideoDecodeH265SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionCreateInfoEXT;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::VideoDecodeH265CreateFlagsEXT flags = {};
    const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionCreateInfoEXT ) == sizeof( VkVideoDecodeH265SessionCreateInfoEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionCreateInfoEXT>::value, "VideoDecodeH265SessionCreateInfoEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH265SessionCreateInfoEXT>
  {
    using Type = VideoDecodeH265SessionCreateInfoEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoDecodeH265SessionParametersAddInfoEXT
  {
    using NativeType = VkVideoDecodeH265SessionParametersAddInfoEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265SessionParametersAddInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoEXT(uint32_t spsStdCount_ = {}, const StdVideoH265SequenceParameterSet * pSpsStd_ = {}, uint32_t ppsStdCount_ = {}, const StdVideoH265PictureParameterSet * pPpsStd_ = {}) VULKAN_HPP_NOEXCEPT
      : spsStdCount( spsStdCount_ ), pSpsStd( pSpsStd_ ), ppsStdCount( ppsStdCount_ ), pPpsStd( pPpsStd_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoEXT( VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH265SessionParametersAddInfoEXT( VkVideoDecodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH265SessionParametersAddInfoEXT( *reinterpret_cast<VideoDecodeH265SessionParametersAddInfoEXT const *>( &rhs ) )
    {}

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoDecodeH265SessionParametersAddInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & spsStd_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const & ppsStd_ = {} )
      : spsStdCount( static_cast<uint32_t>( spsStd_.size() ) ), pSpsStd( spsStd_.data() ), ppsStdCount( static_cast<uint32_t>( ppsStd_.size() ) ), pPpsStd( ppsStd_.data() )
    {}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoDecodeH265SessionParametersAddInfoEXT & operator=( VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH265SessionParametersAddInfoEXT & operator=( VkVideoDecodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT & setSpsStdCount( uint32_t spsStdCount_ ) VULKAN_HPP_NOEXCEPT
    {
      spsStdCount = spsStdCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT & setPSpsStd( const StdVideoH265SequenceParameterSet * pSpsStd_ ) VULKAN_HPP_NOEXCEPT
    {
      pSpsStd = pSpsStd_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoDecodeH265SessionParametersAddInfoEXT & setSpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & spsStd_ ) VULKAN_HPP_NOEXCEPT
    {
      spsStdCount = static_cast<uint32_t>( spsStd_.size() );
      pSpsStd = spsStd_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT & setPpsStdCount( uint32_t ppsStdCount_ ) VULKAN_HPP_NOEXCEPT
    {
      ppsStdCount = ppsStdCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT & setPPpsStd( const StdVideoH265PictureParameterSet * pPpsStd_ ) VULKAN_HPP_NOEXCEPT
    {
      pPpsStd = pPpsStd_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoDecodeH265SessionParametersAddInfoEXT & setPpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const & ppsStd_ ) VULKAN_HPP_NOEXCEPT
    {
      ppsStdCount = static_cast<uint32_t>( ppsStd_.size() );
      pPpsStd = ppsStd_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkVideoDecodeH265SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH265SessionParametersAddInfoEXT*>( this );
    }

    explicit operator VkVideoDecodeH265SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH265SessionParametersAddInfoEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const StdVideoH265SequenceParameterSet * const &, uint32_t const &, const StdVideoH265PictureParameterSet * const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, spsStdCount, pSpsStd, ppsStdCount, pPpsStd );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( VideoDecodeH265SessionParametersAddInfoEXT const & ) const = default;
#else
    bool operator==( VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( spsStdCount == rhs.spsStdCount )
          && ( pSpsStd == rhs.pSpsStd )
          && ( ppsStdCount == rhs.ppsStdCount )
          && ( pPpsStd == rhs.pPpsStd );
#endif
    }

    bool operator!=( VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionParametersAddInfoEXT;
    const void * pNext = {};
    uint32_t spsStdCount = {};
    const StdVideoH265SequenceParameterSet * pSpsStd = {};
    uint32_t ppsStdCount = {};
    const StdVideoH265PictureParameterSet * pPpsStd = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT ) == sizeof( VkVideoDecodeH265SessionParametersAddInfoEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT>::value, "VideoDecodeH265SessionParametersAddInfoEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH265SessionParametersAddInfoEXT>
  {
    using Type = VideoDecodeH265SessionParametersAddInfoEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

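  // Illustrative usage sketch (not part of the generated header): when VULKAN_HPP_DISABLE_ENHANCED_MODE
  // is not defined, the ArrayProxyNoTemporaries overloads fill each count/pointer pair from a container,
  // keeping spsStdCount/pSpsStd and ppsStdCount/pPpsStd consistent. The vectors below are assumed to be
  // populated elsewhere from the bitstream's parameter sets.
  //
  //   std::vector<StdVideoH265SequenceParameterSet> spsList;  // filled elsewhere
  //   std::vector<StdVideoH265PictureParameterSet>  ppsList;  // filled elsewhere
  //   auto addInfo = VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT{}
  //                    .setSpsStd( spsList )
  //                    .setPpsStd( ppsList );
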
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoDecodeH265SessionParametersCreateInfoEXT
  {
    using NativeType = VkVideoDecodeH265SessionParametersCreateInfoEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersCreateInfoEXT(uint32_t maxSpsStdCount_ = {}, uint32_t maxPpsStdCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ = {}) VULKAN_HPP_NOEXCEPT
      : maxSpsStdCount( maxSpsStdCount_ ), maxPpsStdCount( maxPpsStdCount_ ), pParametersAddInfo( pParametersAddInfo_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersCreateInfoEXT( VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH265SessionParametersCreateInfoEXT( VkVideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH265SessionParametersCreateInfoEXT( *reinterpret_cast<VideoDecodeH265SessionParametersCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoDecodeH265SessionParametersCreateInfoEXT & operator=( VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH265SessionParametersCreateInfoEXT & operator=( VkVideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT & setMaxSpsStdCount( uint32_t maxSpsStdCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxSpsStdCount = maxSpsStdCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT & setMaxPpsStdCount( uint32_t maxPpsStdCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxPpsStdCount = maxPpsStdCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT & setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pParametersAddInfo = pParametersAddInfo_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkVideoDecodeH265SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH265SessionParametersCreateInfoEXT*>( this );
    }

    explicit operator VkVideoDecodeH265SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH265SessionParametersCreateInfoEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT * const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, maxSpsStdCount, maxPpsStdCount, pParametersAddInfo );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( VideoDecodeH265SessionParametersCreateInfoEXT const & ) const = default;
#else
    bool operator==( VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( maxSpsStdCount == rhs.maxSpsStdCount )
          && ( maxPpsStdCount == rhs.maxPpsStdCount )
          && ( pParametersAddInfo == rhs.pParametersAddInfo );
#endif
    }

    bool operator!=( VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT;
    const void * pNext = {};
    uint32_t maxSpsStdCount = {};
    uint32_t maxPpsStdCount = {};
    const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT * pParametersAddInfo = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoEXT ) == sizeof( VkVideoDecodeH265SessionParametersCreateInfoEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoEXT>::value, "VideoDecodeH265SessionParametersCreateInfoEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT>
  {
    using Type = VideoDecodeH265SessionParametersCreateInfoEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

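  // Illustrative usage sketch (not part of the generated header): pParametersAddInfo is optional and may
  // point at a VideoDecodeH265SessionParametersAddInfoEXT such as the one sketched above; the max counts
  // only bound how many parameter sets the session parameters object can hold. The values are examples.
  //
  //   auto parametersCreateInfo = VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoEXT{}
  //                                 .setMaxSpsStdCount( 4 )
  //                                 .setMaxPpsStdCount( 4 )
  //                                 .setPParametersAddInfo( &addInfo );
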
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoDecodeInfoKHR
  {
    using NativeType = VkVideoDecodeInfoKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR(VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::Offset2D codedOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D codedExtent_ = {}, VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_ = {}, VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR dstPictureResource_ = {}, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_ = {}, uint32_t referenceSlotCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ = {}) VULKAN_HPP_NOEXCEPT
      : flags( flags_ ), codedOffset( codedOffset_ ), codedExtent( codedExtent_ ), srcBuffer( srcBuffer_ ), srcBufferOffset( srcBufferOffset_ ), srcBufferRange( srcBufferRange_ ), dstPictureResource( dstPictureResource_ ), pSetupReferenceSlot( pSetupReferenceSlot_ ), referenceSlotCount( referenceSlotCount_ ), pReferenceSlots( pReferenceSlots_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR( VideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeInfoKHR( VkVideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeInfoKHR( *reinterpret_cast<VideoDecodeInfoKHR const *>( &rhs ) )
    {}

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoDecodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_, VULKAN_HPP_NAMESPACE::Offset2D codedOffset_, VULKAN_HPP_NAMESPACE::Extent2D codedExtent_, VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_, VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_, VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR dstPictureResource_, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR> const & referenceSlots_ )
      : flags( flags_ ), codedOffset( codedOffset_ ), codedExtent( codedExtent_ ), srcBuffer( srcBuffer_ ), srcBufferOffset( srcBufferOffset_ ), srcBufferRange( srcBufferRange_ ), dstPictureResource( dstPictureResource_ ), pSetupReferenceSlot( pSetupReferenceSlot_ ), referenceSlotCount( static_cast<uint32_t>( referenceSlots_.size() ) ), pReferenceSlots( referenceSlots_.data() )
    {}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoDecodeInfoKHR & operator=( VideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeInfoKHR & operator=( VkVideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setCodedOffset( VULKAN_HPP_NAMESPACE::Offset2D const & codedOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      codedOffset = codedOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & codedExtent_ ) VULKAN_HPP_NOEXCEPT
    {
      codedExtent = codedExtent_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      srcBuffer = srcBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setSrcBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      srcBufferOffset = srcBufferOffset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setSrcBufferRange( VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_ ) VULKAN_HPP_NOEXCEPT
    {
      srcBufferRange = srcBufferRange_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setDstPictureResource( VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR const & dstPictureResource_ ) VULKAN_HPP_NOEXCEPT
    {
      dstPictureResource = dstPictureResource_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setPSetupReferenceSlot( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT
    {
      pSetupReferenceSlot = pSetupReferenceSlot_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT
    {
      referenceSlotCount = referenceSlotCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT
    {
      pReferenceSlots = pReferenceSlots_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoDecodeInfoKHR & setReferenceSlots( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR> const & referenceSlots_ ) VULKAN_HPP_NOEXCEPT
    {
      referenceSlotCount = static_cast<uint32_t>( referenceSlots_.size() );
      pReferenceSlots = referenceSlots_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkVideoDecodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeInfoKHR*>( this );
    }

    explicit operator VkVideoDecodeInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeInfoKHR*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR const &, VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR const &, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, codedOffset, codedExtent, srcBuffer, srcBufferOffset, srcBufferRange, dstPictureResource, pSetupReferenceSlot, referenceSlotCount, pReferenceSlots );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( VideoDecodeInfoKHR const & ) const = default;
#else
    bool operator==( VideoDecodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags )
          && ( codedOffset == rhs.codedOffset )
          && ( codedExtent == rhs.codedExtent )
          && ( srcBuffer == rhs.srcBuffer )
          && ( srcBufferOffset == rhs.srcBufferOffset )
          && ( srcBufferRange == rhs.srcBufferRange )
          && ( dstPictureResource == rhs.dstPictureResource )
          && ( pSetupReferenceSlot == rhs.pSetupReferenceSlot )
          && ( referenceSlotCount == rhs.referenceSlotCount )
          && ( pReferenceSlots == rhs.pReferenceSlots );
#endif
    }

    bool operator!=( VideoDecodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeInfoKHR;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags = {};
    VULKAN_HPP_NAMESPACE::Offset2D codedOffset = {};
    VULKAN_HPP_NAMESPACE::Extent2D codedExtent = {};
    VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {};
    VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset = {};
    VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange = {};
    VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR dstPictureResource = {};
    const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot = {};
    uint32_t referenceSlotCount = {};
    const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR ) == sizeof( VkVideoDecodeInfoKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR>::value, "VideoDecodeInfoKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeInfoKHR>
  {
    using Type = VideoDecodeInfoKHR;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

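  // Illustrative usage sketch (not part of the generated header): a decode operation names the source
  // bitstream range, the destination picture resource and the DPB reference slots. The buffer handle,
  // size and picture resource below are assumed to have been created elsewhere.
  //
  //   std::vector<VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR> referenceSlots;  // filled elsewhere
  //   auto decodeInfo = VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR{}
  //                       .setSrcBuffer( bitstreamBuffer )
  //                       .setSrcBufferOffset( 0 )
  //                       .setSrcBufferRange( bitstreamSize )
  //                       .setDstPictureResource( decodedPicture )
  //                       .setReferenceSlots( referenceSlots );
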
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeCapabilitiesKHR
  {
    using NativeType = VkVideoEncodeCapabilitiesKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeCapabilitiesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeCapabilitiesKHR(VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR rateControlModes_ = {}, uint8_t rateControlLayerCount_ = {}, uint8_t qualityLevelCount_ = {}, VULKAN_HPP_NAMESPACE::Extent2D inputImageDataFillAlignment_ = {}) VULKAN_HPP_NOEXCEPT
      : flags( flags_ ), rateControlModes( rateControlModes_ ), rateControlLayerCount( rateControlLayerCount_ ), qualityLevelCount( qualityLevelCount_ ), inputImageDataFillAlignment( inputImageDataFillAlignment_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoEncodeCapabilitiesKHR( VideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeCapabilitiesKHR( VkVideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeCapabilitiesKHR( *reinterpret_cast<VideoEncodeCapabilitiesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoEncodeCapabilitiesKHR & operator=( VideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeCapabilitiesKHR & operator=( VkVideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeCapabilitiesKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeCapabilitiesKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeCapabilitiesKHR & setRateControlModes( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR rateControlModes_ ) VULKAN_HPP_NOEXCEPT
    {
      rateControlModes = rateControlModes_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeCapabilitiesKHR & setRateControlLayerCount( uint8_t rateControlLayerCount_ ) VULKAN_HPP_NOEXCEPT
    {
      rateControlLayerCount = rateControlLayerCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeCapabilitiesKHR & setQualityLevelCount( uint8_t qualityLevelCount_ ) VULKAN_HPP_NOEXCEPT
    {
      qualityLevelCount = qualityLevelCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeCapabilitiesKHR & setInputImageDataFillAlignment( VULKAN_HPP_NAMESPACE::Extent2D const & inputImageDataFillAlignment_ ) VULKAN_HPP_NOEXCEPT
    {
      inputImageDataFillAlignment = inputImageDataFillAlignment_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkVideoEncodeCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeCapabilitiesKHR*>( this );
    }

    explicit operator VkVideoEncodeCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeCapabilitiesKHR*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR const &, VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR const &, uint8_t const &, uint8_t const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, rateControlModes, rateControlLayerCount, qualityLevelCount, inputImageDataFillAlignment );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( VideoEncodeCapabilitiesKHR const & ) const = default;
#else
    bool operator==( VideoEncodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags )
          && ( rateControlModes == rhs.rateControlModes )
          && ( rateControlLayerCount == rhs.rateControlLayerCount )
          && ( qualityLevelCount == rhs.qualityLevelCount )
          && ( inputImageDataFillAlignment == rhs.inputImageDataFillAlignment );
#endif
    }

    bool operator!=( VideoEncodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeCapabilitiesKHR;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR flags = {};
    VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR rateControlModes = {};
    uint8_t rateControlLayerCount = {};
    uint8_t qualityLevelCount = {};
    VULKAN_HPP_NAMESPACE::Extent2D inputImageDataFillAlignment = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR ) == sizeof( VkVideoEncodeCapabilitiesKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR>::value, "VideoEncodeCapabilitiesKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeCapabilitiesKHR>
  {
    using Type = VideoEncodeCapabilitiesKHR;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

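  // Note (illustrative, not part of the generated header): VideoEncodeCapabilitiesKHR is an output
  // structure. It is typically chained into the pNext of the codec-agnostic capabilities struct that the
  // capabilities query fills, rather than populated by the application; the query overload shown is an
  // assumption based on the beta video extensions.
  //
  //   VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR encodeCaps;
  //   VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR caps;
  //   caps.pNext = &encodeCaps;
  //   physicalDevice.getVideoCapabilitiesKHR( &videoProfile, &caps );
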
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeH264CapabilitiesEXT
  {
    using NativeType = VkVideoEncodeH264CapabilitiesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264CapabilitiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT(VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT inputModeFlags_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT outputModeFlags_ = {}, uint8_t maxPPictureL0ReferenceCount_ = {}, uint8_t maxBPictureL0ReferenceCount_ = {}, uint8_t maxL1ReferenceCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 motionVectorsOverPicBoundariesFlag_ = {}, uint32_t maxBytesPerPicDenom_ = {}, uint32_t maxBitsPerMbDenom_ = {}, uint32_t log2MaxMvLengthHorizontal_ = {}, uint32_t log2MaxMvLengthVertical_ = {}, VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion_ = {}) VULKAN_HPP_NOEXCEPT
      : flags( flags_ ), inputModeFlags( inputModeFlags_ ), outputModeFlags( outputModeFlags_ ), maxPPictureL0ReferenceCount( maxPPictureL0ReferenceCount_ ), maxBPictureL0ReferenceCount( maxBPictureL0ReferenceCount_ ), maxL1ReferenceCount( maxL1ReferenceCount_ ), motionVectorsOverPicBoundariesFlag( motionVectorsOverPicBoundariesFlag_ ), maxBytesPerPicDenom( maxBytesPerPicDenom_ ), maxBitsPerMbDenom( maxBitsPerMbDenom_ ), log2MaxMvLengthHorizontal( log2MaxMvLengthHorizontal_ ), log2MaxMvLengthVertical( log2MaxMvLengthVertical_ ), stdExtensionVersion( stdExtensionVersion_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT( VideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264CapabilitiesEXT( VkVideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264CapabilitiesEXT( *reinterpret_cast<VideoEncodeH264CapabilitiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoEncodeH264CapabilitiesEXT & operator=( VideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264CapabilitiesEXT & operator=( VkVideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & setInputModeFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT inputModeFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      inputModeFlags = inputModeFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & setOutputModeFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT outputModeFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      outputModeFlags = outputModeFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & setMaxPPictureL0ReferenceCount( uint8_t maxPPictureL0ReferenceCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxPPictureL0ReferenceCount = maxPPictureL0ReferenceCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & setMaxBPictureL0ReferenceCount( uint8_t maxBPictureL0ReferenceCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxBPictureL0ReferenceCount = maxBPictureL0ReferenceCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & setMaxL1ReferenceCount( uint8_t maxL1ReferenceCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxL1ReferenceCount = maxL1ReferenceCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & setMotionVectorsOverPicBoundariesFlag( VULKAN_HPP_NAMESPACE::Bool32 motionVectorsOverPicBoundariesFlag_ ) VULKAN_HPP_NOEXCEPT
    {
      motionVectorsOverPicBoundariesFlag = motionVectorsOverPicBoundariesFlag_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & setMaxBytesPerPicDenom( uint32_t maxBytesPerPicDenom_ ) VULKAN_HPP_NOEXCEPT
    {
      maxBytesPerPicDenom = maxBytesPerPicDenom_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & setMaxBitsPerMbDenom( uint32_t maxBitsPerMbDenom_ ) VULKAN_HPP_NOEXCEPT
    {
      maxBitsPerMbDenom = maxBitsPerMbDenom_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & setLog2MaxMvLengthHorizontal( uint32_t log2MaxMvLengthHorizontal_ ) VULKAN_HPP_NOEXCEPT
    {
      log2MaxMvLengthHorizontal = log2MaxMvLengthHorizontal_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & setLog2MaxMvLengthVertical( uint32_t log2MaxMvLengthVertical_ ) VULKAN_HPP_NOEXCEPT
    {
      log2MaxMvLengthVertical = log2MaxMvLengthVertical_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT & setStdExtensionVersion( VULKAN_HPP_NAMESPACE::ExtensionProperties const & stdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      stdExtensionVersion = stdExtensionVersion_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkVideoEncodeH264CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264CapabilitiesEXT*>( this );
    }

    explicit operator VkVideoEncodeH264CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264CapabilitiesEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsEXT const &, VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT const &, VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT const &, uint8_t const &, uint8_t const &, uint8_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ExtensionProperties const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, inputModeFlags, outputModeFlags, maxPPictureL0ReferenceCount, maxBPictureL0ReferenceCount, maxL1ReferenceCount, motionVectorsOverPicBoundariesFlag, maxBytesPerPicDenom, maxBitsPerMbDenom, log2MaxMvLengthHorizontal, log2MaxMvLengthVertical, stdExtensionVersion );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( VideoEncodeH264CapabilitiesEXT const & ) const = default;
#else
    bool operator==( VideoEncodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags )
          && ( inputModeFlags == rhs.inputModeFlags )
          && ( outputModeFlags == rhs.outputModeFlags )
          && ( maxPPictureL0ReferenceCount == rhs.maxPPictureL0ReferenceCount )
          && ( maxBPictureL0ReferenceCount == rhs.maxBPictureL0ReferenceCount )
          && ( maxL1ReferenceCount == rhs.maxL1ReferenceCount )
          && ( motionVectorsOverPicBoundariesFlag == rhs.motionVectorsOverPicBoundariesFlag )
          && ( maxBytesPerPicDenom == rhs.maxBytesPerPicDenom )
          && ( maxBitsPerMbDenom == rhs.maxBitsPerMbDenom )
          && ( log2MaxMvLengthHorizontal == rhs.log2MaxMvLengthHorizontal )
          && ( log2MaxMvLengthVertical == rhs.log2MaxMvLengthVertical )
          && ( stdExtensionVersion == rhs.stdExtensionVersion );
#endif
    }

    bool operator!=( VideoEncodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264CapabilitiesEXT;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsEXT flags = {};
    VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT inputModeFlags = {};
    VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT outputModeFlags = {};
    uint8_t maxPPictureL0ReferenceCount = {};
    uint8_t maxBPictureL0ReferenceCount = {};
    uint8_t maxL1ReferenceCount = {};
    VULKAN_HPP_NAMESPACE::Bool32 motionVectorsOverPicBoundariesFlag = {};
    uint32_t maxBytesPerPicDenom = {};
    uint32_t maxBitsPerMbDenom = {};
    uint32_t log2MaxMvLengthHorizontal = {};
    uint32_t log2MaxMvLengthVertical = {};
    VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT ) == sizeof( VkVideoEncodeH264CapabilitiesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT>::value, "VideoEncodeH264CapabilitiesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264CapabilitiesEXT>
  {
    using Type = VideoEncodeH264CapabilitiesEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeH264DpbSlotInfoEXT
  {
    using NativeType = VkVideoEncodeH264DpbSlotInfoEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264DpbSlotInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH264DpbSlotInfoEXT(int8_t slotIndex_ = {}, const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo_ = {}) VULKAN_HPP_NOEXCEPT
      : slotIndex( slotIndex_ ), pStdReferenceInfo( pStdReferenceInfo_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoEncodeH264DpbSlotInfoEXT( VideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264DpbSlotInfoEXT( VkVideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264DpbSlotInfoEXT( *reinterpret_cast<VideoEncodeH264DpbSlotInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoEncodeH264DpbSlotInfoEXT & operator=( VideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264DpbSlotInfoEXT & operator=( VkVideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoEXT & setSlotIndex( int8_t slotIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      slotIndex = slotIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoEXT & setPStdReferenceInfo( const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdReferenceInfo = pStdReferenceInfo_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkVideoEncodeH264DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264DpbSlotInfoEXT*>( this );
    }

    explicit operator VkVideoEncodeH264DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264DpbSlotInfoEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, int8_t const &, const StdVideoEncodeH264ReferenceInfo * const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, slotIndex, pStdReferenceInfo );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( VideoEncodeH264DpbSlotInfoEXT const & ) const = default;
#else
    bool operator==( VideoEncodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( slotIndex == rhs.slotIndex )
          && ( pStdReferenceInfo == rhs.pStdReferenceInfo );
#endif
    }

    bool operator!=( VideoEncodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264DpbSlotInfoEXT;
    const void * pNext = {};
    int8_t slotIndex = {};
    const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT ) == sizeof( VkVideoEncodeH264DpbSlotInfoEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT>::value, "VideoEncodeH264DpbSlotInfoEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264DpbSlotInfoEXT>
  {
    using Type = VideoEncodeH264DpbSlotInfoEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeH264EmitPictureParametersEXT
  {
    using NativeType = VkVideoEncodeH264EmitPictureParametersEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264EmitPictureParametersEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH264EmitPictureParametersEXT(uint8_t spsId_ = {}, VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ = {}, uint32_t ppsIdEntryCount_ = {}, const uint8_t * ppsIdEntries_ = {}) VULKAN_HPP_NOEXCEPT
      : spsId( spsId_ ), emitSpsEnable( emitSpsEnable_ ), ppsIdEntryCount( ppsIdEntryCount_ ), ppsIdEntries( ppsIdEntries_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoEncodeH264EmitPictureParametersEXT( VideoEncodeH264EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264EmitPictureParametersEXT( VkVideoEncodeH264EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264EmitPictureParametersEXT( *reinterpret_cast<VideoEncodeH264EmitPictureParametersEXT const *>( &rhs ) )
    {}

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH264EmitPictureParametersEXT( uint8_t spsId_, VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint8_t> const & psIdEntries_ )
      : spsId( spsId_ ), emitSpsEnable( emitSpsEnable_ ), ppsIdEntryCount( static_cast<uint32_t>( psIdEntries_.size() ) ), ppsIdEntries( psIdEntries_.data() )
    {}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoEncodeH264EmitPictureParametersEXT & operator=( VideoEncodeH264EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264EmitPictureParametersEXT & operator=( VkVideoEncodeH264EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersEXT & setSpsId( uint8_t spsId_ ) VULKAN_HPP_NOEXCEPT
    {
      spsId = spsId_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersEXT & setEmitSpsEnable( VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      emitSpsEnable = emitSpsEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersEXT & setPpsIdEntryCount( uint32_t ppsIdEntryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      ppsIdEntryCount = ppsIdEntryCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersEXT & setPpsIdEntries( const uint8_t * ppsIdEntries_ ) VULKAN_HPP_NOEXCEPT
    {
      ppsIdEntries = ppsIdEntries_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH264EmitPictureParametersEXT & setPsIdEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint8_t> const & psIdEntries_ ) VULKAN_HPP_NOEXCEPT
    {
      ppsIdEntryCount = static_cast<uint32_t>( psIdEntries_.size() );
      ppsIdEntries = psIdEntries_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkVideoEncodeH264EmitPictureParametersEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264EmitPictureParametersEXT*>( this );
    }

    explicit operator VkVideoEncodeH264EmitPictureParametersEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264EmitPictureParametersEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint8_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, const uint8_t * const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, spsId, emitSpsEnable, ppsIdEntryCount, ppsIdEntries );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( VideoEncodeH264EmitPictureParametersEXT const & ) const = default;
#else
    bool operator==( VideoEncodeH264EmitPictureParametersEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( spsId == rhs.spsId )
          && ( emitSpsEnable == rhs.emitSpsEnable )
          && ( ppsIdEntryCount == rhs.ppsIdEntryCount )
          && ( ppsIdEntries == rhs.ppsIdEntries );
#endif
    }

    bool operator!=( VideoEncodeH264EmitPictureParametersEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264EmitPictureParametersEXT;
    const void * pNext = {};
    uint8_t spsId = {};
    VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable = {};
    uint32_t ppsIdEntryCount = {};
    const uint8_t * ppsIdEntries = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersEXT ) == sizeof( VkVideoEncodeH264EmitPictureParametersEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersEXT>::value, "VideoEncodeH264EmitPictureParametersEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264EmitPictureParametersEXT>
  {
    using Type = VideoEncodeH264EmitPictureParametersEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeH264FrameSizeEXT
  {
    using NativeType = VkVideoEncodeH264FrameSizeEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH264FrameSizeEXT(uint32_t frameISize_ = {}, uint32_t framePSize_ = {}, uint32_t frameBSize_ = {}) VULKAN_HPP_NOEXCEPT
      : frameISize( frameISize_ ), framePSize( framePSize_ ), frameBSize( frameBSize_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoEncodeH264FrameSizeEXT( VideoEncodeH264FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264FrameSizeEXT( VkVideoEncodeH264FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264FrameSizeEXT( *reinterpret_cast<VideoEncodeH264FrameSizeEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoEncodeH264FrameSizeEXT & operator=( VideoEncodeH264FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264FrameSizeEXT & operator=( VkVideoEncodeH264FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeEXT & setFrameISize( uint32_t frameISize_ ) VULKAN_HPP_NOEXCEPT
    {
      frameISize = frameISize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeEXT & setFramePSize( uint32_t framePSize_ ) VULKAN_HPP_NOEXCEPT
    {
      framePSize = framePSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeEXT & setFrameBSize( uint32_t frameBSize_ ) VULKAN_HPP_NOEXCEPT
    {
      frameBSize = frameBSize_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkVideoEncodeH264FrameSizeEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264FrameSizeEXT*>( this );
    }

    explicit operator VkVideoEncodeH264FrameSizeEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264FrameSizeEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( frameISize, framePSize, frameBSize );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( VideoEncodeH264FrameSizeEXT const & ) const = default;
#else
    bool operator==( VideoEncodeH264FrameSizeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( frameISize == rhs.frameISize )
          && ( framePSize == rhs.framePSize )
          && ( frameBSize == rhs.frameBSize );
#endif
    }

    bool operator!=( VideoEncodeH264FrameSizeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t frameISize = {};
    uint32_t framePSize = {};
    uint32_t frameBSize = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT ) == sizeof( VkVideoEncodeH264FrameSizeEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT>::value, "VideoEncodeH264FrameSizeEXT is not nothrow_move_constructible!" );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

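  // Note (illustrative, not part of the generated header): VideoEncodeH264FrameSizeEXT carries no
  // sType/pNext, so it can be filled directly with the chained setters; the byte sizes below are
  // example values only.
  //
  //   auto frameSizes = VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT{}
  //                       .setFrameISize( 64 * 1024 )
  //                       .setFramePSize( 32 * 1024 )
  //                       .setFrameBSize( 16 * 1024 );
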
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeH264ReferenceListsEXT
  {
    using NativeType = VkVideoEncodeH264ReferenceListsEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264ReferenceListsEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH264ReferenceListsEXT(uint8_t referenceList0EntryCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pReferenceList0Entries_ = {}, uint8_t referenceList1EntryCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pReferenceList1Entries_ = {}, const StdVideoEncodeH264RefMemMgmtCtrlOperations * pMemMgmtCtrlOperations_ = {}) VULKAN_HPP_NOEXCEPT
      : referenceList0EntryCount( referenceList0EntryCount_ ), pReferenceList0Entries( pReferenceList0Entries_ ), referenceList1EntryCount( referenceList1EntryCount_ ), pReferenceList1Entries( pReferenceList1Entries_ ), pMemMgmtCtrlOperations( pMemMgmtCtrlOperations_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoEncodeH264ReferenceListsEXT( VideoEncodeH264ReferenceListsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264ReferenceListsEXT( VkVideoEncodeH264ReferenceListsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264ReferenceListsEXT( *reinterpret_cast<VideoEncodeH264ReferenceListsEXT const *>( &rhs ) )
    {}

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH264ReferenceListsEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const & referenceList0Entries_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const & referenceList1Entries_ = {}, const StdVideoEncodeH264RefMemMgmtCtrlOperations * pMemMgmtCtrlOperations_ = {} )
      : referenceList0EntryCount( static_cast<uint8_t>( referenceList0Entries_.size() ) ), pReferenceList0Entries( referenceList0Entries_.data() ), referenceList1EntryCount( static_cast<uint8_t>( referenceList1Entries_.size() ) ), pReferenceList1Entries( referenceList1Entries_.data() ), pMemMgmtCtrlOperations( pMemMgmtCtrlOperations_ )
    {}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoEncodeH264ReferenceListsEXT & operator=( VideoEncodeH264ReferenceListsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264ReferenceListsEXT & operator=( VkVideoEncodeH264ReferenceListsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsEXT & setReferenceList0EntryCount( uint8_t referenceList0EntryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      referenceList0EntryCount = referenceList0EntryCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsEXT & setPReferenceList0Entries( const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pReferenceList0Entries_ ) VULKAN_HPP_NOEXCEPT
    {
      pReferenceList0Entries = pReferenceList0Entries_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH264ReferenceListsEXT & setReferenceList0Entries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const & referenceList0Entries_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
referenceList0EntryCount = static_cast<uint8_t>( referenceList0Entries_.size() );
|
|
pReferenceList0Entries = referenceList0Entries_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsEXT & setReferenceList1EntryCount( uint8_t referenceList1EntryCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
referenceList1EntryCount = referenceList1EntryCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsEXT & setPReferenceList1Entries( const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pReferenceList1Entries_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pReferenceList1Entries = pReferenceList1Entries_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
VideoEncodeH264ReferenceListsEXT & setReferenceList1Entries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const & referenceList1Entries_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
referenceList1EntryCount = static_cast<uint8_t>( referenceList1Entries_.size() );
|
|
pReferenceList1Entries = referenceList1Entries_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsEXT & setPMemMgmtCtrlOperations( const StdVideoEncodeH264RefMemMgmtCtrlOperations * pMemMgmtCtrlOperations_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pMemMgmtCtrlOperations = pMemMgmtCtrlOperations_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoEncodeH264ReferenceListsEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoEncodeH264ReferenceListsEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoEncodeH264ReferenceListsEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoEncodeH264ReferenceListsEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint8_t const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * const &, uint8_t const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * const &, const StdVideoEncodeH264RefMemMgmtCtrlOperations * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, referenceList0EntryCount, pReferenceList0Entries, referenceList1EntryCount, pReferenceList1Entries, pMemMgmtCtrlOperations );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoEncodeH264ReferenceListsEXT const & ) const = default;
|
|
#else
|
|
bool operator==( VideoEncodeH264ReferenceListsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( referenceList0EntryCount == rhs.referenceList0EntryCount )
|
|
&& ( pReferenceList0Entries == rhs.pReferenceList0Entries )
|
|
&& ( referenceList1EntryCount == rhs.referenceList1EntryCount )
|
|
&& ( pReferenceList1Entries == rhs.pReferenceList1Entries )
|
|
&& ( pMemMgmtCtrlOperations == rhs.pMemMgmtCtrlOperations );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoEncodeH264ReferenceListsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264ReferenceListsEXT;
|
|
const void * pNext = {};
|
|
uint8_t referenceList0EntryCount = {};
|
|
const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pReferenceList0Entries = {};
|
|
uint8_t referenceList1EntryCount = {};
|
|
const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pReferenceList1Entries = {};
|
|
const StdVideoEncodeH264RefMemMgmtCtrlOperations * pMemMgmtCtrlOperations = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsEXT ) == sizeof( VkVideoEncodeH264ReferenceListsEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsEXT>::value, "VideoEncodeH264ReferenceListsEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoEncodeH264ReferenceListsEXT>
|
|
{
|
|
using Type = VideoEncodeH264ReferenceListsEXT;
|
|
};
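
  // Usage sketch: a hypothetical helper (not part of the generated API) that wires a
  // caller-owned array of DPB slot descriptions into the L0 reference list; the array
  // must outlive the returned structure, since only a pointer to it is stored.
  VULKAN_HPP_INLINE VideoEncodeH264ReferenceListsEXT exampleVideoEncodeH264ReferenceLists(
    const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pList0Slots, uint8_t list0SlotCount ) VULKAN_HPP_NOEXCEPT
  {
    VideoEncodeH264ReferenceListsEXT referenceLists;
    referenceLists.referenceList0EntryCount = list0SlotCount;  // forward (L0) references, e.g. for a P slice
    referenceLists.pReferenceList0Entries   = pList0Slots;
    // referenceList1EntryCount / pReferenceList1Entries stay zero here; pMemMgmtCtrlOperations may
    // optionally point to a caller-owned StdVideoEncodeH264RefMemMgmtCtrlOperations.
    return referenceLists;
  }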
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoEncodeH264NaluSliceEXT
|
|
{
|
|
using NativeType = VkVideoEncodeH264NaluSliceEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264NaluSliceEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoEncodeH264NaluSliceEXT(uint32_t mbCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsEXT * pReferenceFinalLists_ = {}, const StdVideoEncodeH264SliceHeader * pSliceHeaderStd_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: mbCount( mbCount_ ), pReferenceFinalLists( pReferenceFinalLists_ ), pSliceHeaderStd( pSliceHeaderStd_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoEncodeH264NaluSliceEXT( VideoEncodeH264NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeH264NaluSliceEXT( VkVideoEncodeH264NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoEncodeH264NaluSliceEXT( *reinterpret_cast<VideoEncodeH264NaluSliceEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoEncodeH264NaluSliceEXT & operator=( VideoEncodeH264NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeH264NaluSliceEXT & operator=( VkVideoEncodeH264NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceEXT & setMbCount( uint32_t mbCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
mbCount = mbCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceEXT & setPReferenceFinalLists( const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsEXT * pReferenceFinalLists_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pReferenceFinalLists = pReferenceFinalLists_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceEXT & setPSliceHeaderStd( const StdVideoEncodeH264SliceHeader * pSliceHeaderStd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSliceHeaderStd = pSliceHeaderStd_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoEncodeH264NaluSliceEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoEncodeH264NaluSliceEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoEncodeH264NaluSliceEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoEncodeH264NaluSliceEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsEXT * const &, const StdVideoEncodeH264SliceHeader * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, mbCount, pReferenceFinalLists, pSliceHeaderStd );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoEncodeH264NaluSliceEXT const & ) const = default;
|
|
#else
|
|
bool operator==( VideoEncodeH264NaluSliceEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( mbCount == rhs.mbCount )
|
|
&& ( pReferenceFinalLists == rhs.pReferenceFinalLists )
|
|
&& ( pSliceHeaderStd == rhs.pSliceHeaderStd );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoEncodeH264NaluSliceEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264NaluSliceEXT;
|
|
const void * pNext = {};
|
|
uint32_t mbCount = {};
|
|
const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsEXT * pReferenceFinalLists = {};
|
|
const StdVideoEncodeH264SliceHeader * pSliceHeaderStd = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT ) == sizeof( VkVideoEncodeH264NaluSliceEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT>::value, "VideoEncodeH264NaluSliceEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoEncodeH264NaluSliceEXT>
|
|
{
|
|
using Type = VideoEncodeH264NaluSliceEXT;
|
|
};
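
  // Usage sketch: a hypothetical helper (not part of the generated API) describing a single
  // slice via the fluent setters; the reference lists and the codec-standard slice header are
  // caller-owned and only referenced by pointer.
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
  VULKAN_HPP_INLINE VideoEncodeH264NaluSliceEXT exampleVideoEncodeH264NaluSlice(
    uint32_t                                                       mbCount,
    const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsEXT * pReferenceFinalLists,
    const StdVideoEncodeH264SliceHeader *                          pSliceHeaderStd ) VULKAN_HPP_NOEXCEPT
  {
    return VideoEncodeH264NaluSliceEXT{}
      .setMbCount( mbCount )                            // macroblocks covered by this slice
      .setPReferenceFinalLists( pReferenceFinalLists )  // final reference lists used by this slice
      .setPSliceHeaderStd( pSliceHeaderStd );           // StdVideoEncodeH264SliceHeader parameters
  }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/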
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoEncodeH264ProfileEXT
|
|
{
|
|
using NativeType = VkVideoEncodeH264ProfileEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264ProfileEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoEncodeH264ProfileEXT(StdVideoH264ProfileIdc stdProfileIdc_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: stdProfileIdc( stdProfileIdc_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoEncodeH264ProfileEXT( VideoEncodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeH264ProfileEXT( VkVideoEncodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoEncodeH264ProfileEXT( *reinterpret_cast<VideoEncodeH264ProfileEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoEncodeH264ProfileEXT & operator=( VideoEncodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeH264ProfileEXT & operator=( VkVideoEncodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ProfileEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ProfileEXT & setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
stdProfileIdc = stdProfileIdc_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoEncodeH264ProfileEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoEncodeH264ProfileEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoEncodeH264ProfileEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoEncodeH264ProfileEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, StdVideoH264ProfileIdc const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, stdProfileIdc );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
std::strong_ordering operator<=>( VideoEncodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
|
|
if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( VideoEncodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 );
|
|
}
|
|
|
|
bool operator!=( VideoEncodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264ProfileEXT;
|
|
const void * pNext = {};
|
|
StdVideoH264ProfileIdc stdProfileIdc = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileEXT ) == sizeof( VkVideoEncodeH264ProfileEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileEXT>::value, "VideoEncodeH264ProfileEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoEncodeH264ProfileEXT>
|
|
{
|
|
using Type = VideoEncodeH264ProfileEXT;
|
|
};
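
  // Usage sketch: a hypothetical helper (not part of the generated API); stdProfileIdc is
  // zero-initialized by default here and would be set to a value such as STD_VIDEO_H264_PROFILE_IDC_MAIN
  // (from the Video Std headers) in a real application.
  VULKAN_HPP_INLINE VideoEncodeH264ProfileEXT exampleVideoEncodeH264Profile( StdVideoH264ProfileIdc stdProfileIdc = {} ) VULKAN_HPP_NOEXCEPT
  {
    VideoEncodeH264ProfileEXT profile;      // sType is pre-set to eVideoEncodeH264ProfileEXT
    profile.stdProfileIdc = stdProfileIdc;  // H.264 profile_idc the encode session targets
    return profile;                         // typically chained into the pNext of the video profile structure
  }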
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoEncodeH264QpEXT
|
|
{
|
|
using NativeType = VkVideoEncodeH264QpEXT;
|
|
|
|
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoEncodeH264QpEXT(int32_t qpI_ = {}, int32_t qpP_ = {}, int32_t qpB_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: qpI( qpI_ ), qpP( qpP_ ), qpB( qpB_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoEncodeH264QpEXT( VideoEncodeH264QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeH264QpEXT( VkVideoEncodeH264QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoEncodeH264QpEXT( *reinterpret_cast<VideoEncodeH264QpEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoEncodeH264QpEXT & operator=( VideoEncodeH264QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeH264QpEXT & operator=( VkVideoEncodeH264QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpEXT & setQpI( int32_t qpI_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
qpI = qpI_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpEXT & setQpP( int32_t qpP_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
qpP = qpP_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpEXT & setQpB( int32_t qpB_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
qpB = qpB_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoEncodeH264QpEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoEncodeH264QpEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoEncodeH264QpEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoEncodeH264QpEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<int32_t const &, int32_t const &, int32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( qpI, qpP, qpB );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoEncodeH264QpEXT const & ) const = default;
|
|
#else
|
|
bool operator==( VideoEncodeH264QpEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( qpI == rhs.qpI )
|
|
&& ( qpP == rhs.qpP )
|
|
&& ( qpB == rhs.qpB );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoEncodeH264QpEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
int32_t qpI = {};
|
|
int32_t qpP = {};
|
|
int32_t qpB = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT ) == sizeof( VkVideoEncodeH264QpEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT>::value, "VideoEncodeH264QpEXT is not nothrow_move_constructible!" );
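
  // Usage sketch: a hypothetical helper (not part of the generated API) holding constant
  // per-picture-type quantization parameters; the values 26/28/30 are illustrative only
  // (H.264 QP values lie in [0, 51]).
  VULKAN_HPP_INLINE VideoEncodeH264QpEXT exampleVideoEncodeH264Qp() VULKAN_HPP_NOEXCEPT
  {
    VideoEncodeH264QpEXT qp;
    qp.qpI = 26;  // QP used for I frames
    qp.qpP = 28;  // QP used for P frames
    qp.qpB = 30;  // QP used for B frames
    return qp;    // e.g. as minQp / maxQp / initialRcQp in VideoEncodeH264RateControlLayerInfoEXT
  }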
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoEncodeH264RateControlInfoEXT
|
|
{
|
|
using NativeType = VkVideoEncodeH264RateControlInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264RateControlInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlInfoEXT(uint32_t gopFrameCount_ = {}, uint32_t idrPeriod_ = {}, uint32_t consecutiveBFrameCount_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureFlagBitsEXT rateControlStructure_ = VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureFlagBitsEXT::eUnknown, uint8_t temporalLayerCount_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: gopFrameCount( gopFrameCount_ ), idrPeriod( idrPeriod_ ), consecutiveBFrameCount( consecutiveBFrameCount_ ), rateControlStructure( rateControlStructure_ ), temporalLayerCount( temporalLayerCount_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlInfoEXT( VideoEncodeH264RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeH264RateControlInfoEXT( VkVideoEncodeH264RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoEncodeH264RateControlInfoEXT( *reinterpret_cast<VideoEncodeH264RateControlInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoEncodeH264RateControlInfoEXT & operator=( VideoEncodeH264RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeH264RateControlInfoEXT & operator=( VkVideoEncodeH264RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & setGopFrameCount( uint32_t gopFrameCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
gopFrameCount = gopFrameCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & setIdrPeriod( uint32_t idrPeriod_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
idrPeriod = idrPeriod_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & setConsecutiveBFrameCount( uint32_t consecutiveBFrameCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
consecutiveBFrameCount = consecutiveBFrameCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & setRateControlStructure( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureFlagBitsEXT rateControlStructure_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
rateControlStructure = rateControlStructure_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & setTemporalLayerCount( uint8_t temporalLayerCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
temporalLayerCount = temporalLayerCount_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoEncodeH264RateControlInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoEncodeH264RateControlInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoEncodeH264RateControlInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoEncodeH264RateControlInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureFlagBitsEXT const &, uint8_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, gopFrameCount, idrPeriod, consecutiveBFrameCount, rateControlStructure, temporalLayerCount );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoEncodeH264RateControlInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( VideoEncodeH264RateControlInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( gopFrameCount == rhs.gopFrameCount )
|
|
&& ( idrPeriod == rhs.idrPeriod )
|
|
&& ( consecutiveBFrameCount == rhs.consecutiveBFrameCount )
|
|
&& ( rateControlStructure == rhs.rateControlStructure )
|
|
&& ( temporalLayerCount == rhs.temporalLayerCount );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoEncodeH264RateControlInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264RateControlInfoEXT;
|
|
const void * pNext = {};
|
|
uint32_t gopFrameCount = {};
|
|
uint32_t idrPeriod = {};
|
|
uint32_t consecutiveBFrameCount = {};
|
|
VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureFlagBitsEXT rateControlStructure = VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureFlagBitsEXT::eUnknown;
|
|
uint8_t temporalLayerCount = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT ) == sizeof( VkVideoEncodeH264RateControlInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT>::value, "VideoEncodeH264RateControlInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoEncodeH264RateControlInfoEXT>
|
|
{
|
|
using Type = VideoEncodeH264RateControlInfoEXT;
|
|
};
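
  // Usage sketch: a hypothetical helper (not part of the generated API) describing a simple
  // GOP layout; the counts are illustrative, and rateControlStructure is left at its default
  // (eUnknown) here, while a real application would pick the structure it actually uses.
  VULKAN_HPP_INLINE VideoEncodeH264RateControlInfoEXT exampleVideoEncodeH264RateControlInfo() VULKAN_HPP_NOEXCEPT
  {
    VideoEncodeH264RateControlInfoEXT rateControlInfo;
    rateControlInfo.gopFrameCount          = 16;  // frames per GOP
    rateControlInfo.idrPeriod              = 16;  // distance between IDR frames
    rateControlInfo.consecutiveBFrameCount = 2;   // B frames between consecutive anchor frames
    rateControlInfo.temporalLayerCount     = 1;   // matches the number of rate control layers provided
    return rateControlInfo;                       // typically chained into the pNext of the generic encode rate control info
  }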
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoEncodeH264RateControlLayerInfoEXT
|
|
{
|
|
using NativeType = VkVideoEncodeH264RateControlLayerInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264RateControlLayerInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlLayerInfoEXT(uint8_t temporalLayerId_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT initialRcQp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT minQp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT maxQp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT maxFrameSize_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: temporalLayerId( temporalLayerId_ ), useInitialRcQp( useInitialRcQp_ ), initialRcQp( initialRcQp_ ), useMinQp( useMinQp_ ), minQp( minQp_ ), useMaxQp( useMaxQp_ ), maxQp( maxQp_ ), useMaxFrameSize( useMaxFrameSize_ ), maxFrameSize( maxFrameSize_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlLayerInfoEXT( VideoEncodeH264RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeH264RateControlLayerInfoEXT( VkVideoEncodeH264RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoEncodeH264RateControlLayerInfoEXT( *reinterpret_cast<VideoEncodeH264RateControlLayerInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoEncodeH264RateControlLayerInfoEXT & operator=( VideoEncodeH264RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeH264RateControlLayerInfoEXT & operator=( VkVideoEncodeH264RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setTemporalLayerId( uint8_t temporalLayerId_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
temporalLayerId = temporalLayerId_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setUseInitialRcQp( VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
useInitialRcQp = useInitialRcQp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setInitialRcQp( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const & initialRcQp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
initialRcQp = initialRcQp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setUseMinQp( VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
useMinQp = useMinQp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setMinQp( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const & minQp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
minQp = minQp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setUseMaxQp( VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
useMaxQp = useMaxQp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setMaxQp( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const & maxQp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxQp = maxQp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setUseMaxFrameSize( VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
useMaxFrameSize = useMaxFrameSize_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setMaxFrameSize( VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT const & maxFrameSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxFrameSize = maxFrameSize_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoEncodeH264RateControlLayerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoEncodeH264RateControlLayerInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoEncodeH264RateControlLayerInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoEncodeH264RateControlLayerInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint8_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, temporalLayerId, useInitialRcQp, initialRcQp, useMinQp, minQp, useMaxQp, maxQp, useMaxFrameSize, maxFrameSize );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoEncodeH264RateControlLayerInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( VideoEncodeH264RateControlLayerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( temporalLayerId == rhs.temporalLayerId )
|
|
&& ( useInitialRcQp == rhs.useInitialRcQp )
|
|
&& ( initialRcQp == rhs.initialRcQp )
|
|
&& ( useMinQp == rhs.useMinQp )
|
|
&& ( minQp == rhs.minQp )
|
|
&& ( useMaxQp == rhs.useMaxQp )
|
|
&& ( maxQp == rhs.maxQp )
|
|
&& ( useMaxFrameSize == rhs.useMaxFrameSize )
|
|
&& ( maxFrameSize == rhs.maxFrameSize );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoEncodeH264RateControlLayerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264RateControlLayerInfoEXT;
|
|
const void * pNext = {};
|
|
uint8_t temporalLayerId = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp = {};
|
|
VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT initialRcQp = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 useMinQp = {};
|
|
VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT minQp = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 useMaxQp = {};
|
|
VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT maxQp = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize = {};
|
|
VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT maxFrameSize = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT ) == sizeof( VkVideoEncodeH264RateControlLayerInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT>::value, "VideoEncodeH264RateControlLayerInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoEncodeH264RateControlLayerInfoEXT>
|
|
{
|
|
using Type = VideoEncodeH264RateControlLayerInfoEXT;
|
|
};
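
  // Usage sketch: a hypothetical helper (not part of the generated API) for a single rate
  // control layer that clamps QP to a caller-provided range; all values are illustrative.
  VULKAN_HPP_INLINE VideoEncodeH264RateControlLayerInfoEXT exampleVideoEncodeH264RateControlLayerInfo(
    VideoEncodeH264QpEXT const & minQp, VideoEncodeH264QpEXT const & maxQp ) VULKAN_HPP_NOEXCEPT
  {
    VideoEncodeH264RateControlLayerInfoEXT layerInfo;
    layerInfo.temporalLayerId = 0;        // temporal layer this structure applies to
    layerInfo.useMinQp        = VK_TRUE;  // enable the lower QP clamp ...
    layerInfo.minQp           = minQp;
    layerInfo.useMaxQp        = VK_TRUE;  // ... and the upper QP clamp
    layerInfo.maxQp           = maxQp;
    // useInitialRcQp / initialRcQp and useMaxFrameSize / maxFrameSize stay disabled in this sketch.
    return layerInfo;
  }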
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoEncodeH264SessionCreateInfoEXT
|
|
{
|
|
using NativeType = VkVideoEncodeH264SessionCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoEXT(VULKAN_HPP_NAMESPACE::VideoEncodeH264CreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxPictureSizeInMbs_ = {}, const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), maxPictureSizeInMbs( maxPictureSizeInMbs_ ), pStdExtensionVersion( pStdExtensionVersion_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoEXT( VideoEncodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeH264SessionCreateInfoEXT( VkVideoEncodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoEncodeH264SessionCreateInfoEXT( *reinterpret_cast<VideoEncodeH264SessionCreateInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoEncodeH264SessionCreateInfoEXT & operator=( VideoEncodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeH264SessionCreateInfoEXT & operator=( VkVideoEncodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH264CreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoEXT & setMaxPictureSizeInMbs( VULKAN_HPP_NAMESPACE::Extent2D const & maxPictureSizeInMbs_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxPictureSizeInMbs = maxPictureSizeInMbs_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoEXT & setPStdExtensionVersion( const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pStdExtensionVersion = pStdExtensionVersion_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoEncodeH264SessionCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoEncodeH264SessionCreateInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoEncodeH264SessionCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoEncodeH264SessionCreateInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeH264CreateFlagsEXT const &, VULKAN_HPP_NAMESPACE::Extent2D const &, const VULKAN_HPP_NAMESPACE::ExtensionProperties * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, maxPictureSizeInMbs, pStdExtensionVersion );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoEncodeH264SessionCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( VideoEncodeH264SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( maxPictureSizeInMbs == rhs.maxPictureSizeInMbs )
|
|
&& ( pStdExtensionVersion == rhs.pStdExtensionVersion );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoEncodeH264SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionCreateInfoEXT;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::VideoEncodeH264CreateFlagsEXT flags = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D maxPictureSizeInMbs = {};
|
|
const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoEXT ) == sizeof( VkVideoEncodeH264SessionCreateInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoEXT>::value, "VideoEncodeH264SessionCreateInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoEncodeH264SessionCreateInfoEXT>
|
|
{
|
|
using Type = VideoEncodeH264SessionCreateInfoEXT;
|
|
};
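
  // Usage sketch: a hypothetical helper (not part of the generated API); maxPictureSizeInMbs is
  // given in macroblocks (16x16 pixels), so 120 x 68 corresponds to a 1920 x 1088 coded picture,
  // and pStdExtensionVersion points to a caller-owned ExtensionProperties naming the Video Std
  // header version in use.
  VULKAN_HPP_INLINE VideoEncodeH264SessionCreateInfoEXT exampleVideoEncodeH264SessionCreateInfo(
    const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion ) VULKAN_HPP_NOEXCEPT
  {
    VideoEncodeH264SessionCreateInfoEXT sessionCreateInfo;
    sessionCreateInfo.maxPictureSizeInMbs.width  = 120;  // picture width in macroblocks
    sessionCreateInfo.maxPictureSizeInMbs.height = 68;   // picture height in macroblocks
    sessionCreateInfo.pStdExtensionVersion       = pStdExtensionVersion;
    return sessionCreateInfo;
  }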
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoEncodeH264SessionParametersAddInfoEXT
|
|
{
|
|
using NativeType = VkVideoEncodeH264SessionParametersAddInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionParametersAddInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoEXT(uint32_t spsStdCount_ = {}, const StdVideoH264SequenceParameterSet * pSpsStd_ = {}, uint32_t ppsStdCount_ = {}, const StdVideoH264PictureParameterSet * pPpsStd_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: spsStdCount( spsStdCount_ ), pSpsStd( pSpsStd_ ), ppsStdCount( ppsStdCount_ ), pPpsStd( pPpsStd_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoEXT( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeH264SessionParametersAddInfoEXT( VkVideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoEncodeH264SessionParametersAddInfoEXT( *reinterpret_cast<VideoEncodeH264SessionParametersAddInfoEXT const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
VideoEncodeH264SessionParametersAddInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & spsStd_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & ppsStd_ = {} )
|
|
: spsStdCount( static_cast<uint32_t>( spsStd_.size() ) ), pSpsStd( spsStd_.data() ), ppsStdCount( static_cast<uint32_t>( ppsStd_.size() ) ), pPpsStd( ppsStd_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoEncodeH264SessionParametersAddInfoEXT & operator=( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeH264SessionParametersAddInfoEXT & operator=( VkVideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & setSpsStdCount( uint32_t spsStdCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
spsStdCount = spsStdCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & setPSpsStd( const StdVideoH264SequenceParameterSet * pSpsStd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSpsStd = pSpsStd_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
VideoEncodeH264SessionParametersAddInfoEXT & setSpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & spsStd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
spsStdCount = static_cast<uint32_t>( spsStd_.size() );
|
|
pSpsStd = spsStd_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & setPpsStdCount( uint32_t ppsStdCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
ppsStdCount = ppsStdCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & setPPpsStd( const StdVideoH264PictureParameterSet * pPpsStd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pPpsStd = pPpsStd_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
VideoEncodeH264SessionParametersAddInfoEXT & setPpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & ppsStd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
ppsStdCount = static_cast<uint32_t>( ppsStd_.size() );
|
|
pPpsStd = ppsStd_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoEncodeH264SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoEncodeH264SessionParametersAddInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoEncodeH264SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoEncodeH264SessionParametersAddInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const StdVideoH264SequenceParameterSet * const &, uint32_t const &, const StdVideoH264PictureParameterSet * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, spsStdCount, pSpsStd, ppsStdCount, pPpsStd );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoEncodeH264SessionParametersAddInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( spsStdCount == rhs.spsStdCount )
|
|
&& ( pSpsStd == rhs.pSpsStd )
|
|
&& ( ppsStdCount == rhs.ppsStdCount )
|
|
&& ( pPpsStd == rhs.pPpsStd );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersAddInfoEXT;
|
|
const void * pNext = {};
|
|
uint32_t spsStdCount = {};
|
|
const StdVideoH264SequenceParameterSet * pSpsStd = {};
|
|
uint32_t ppsStdCount = {};
|
|
const StdVideoH264PictureParameterSet * pPpsStd = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT ) == sizeof( VkVideoEncodeH264SessionParametersAddInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT>::value, "VideoEncodeH264SessionParametersAddInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoEncodeH264SessionParametersAddInfoEXT>
|
|
{
|
|
using Type = VideoEncodeH264SessionParametersAddInfoEXT;
|
|
};
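
  // Usage sketch: a hypothetical helper (not part of the generated API) registering one SPS and
  // one PPS; both codec-standard structures are caller-owned and only referenced by pointer.
  VULKAN_HPP_INLINE VideoEncodeH264SessionParametersAddInfoEXT exampleVideoEncodeH264SessionParametersAddInfo(
    const StdVideoH264SequenceParameterSet * pSps, const StdVideoH264PictureParameterSet * pPps ) VULKAN_HPP_NOEXCEPT
  {
    VideoEncodeH264SessionParametersAddInfoEXT addInfo;
    addInfo.spsStdCount = 1;  // number of entries in pSpsStd
    addInfo.pSpsStd     = pSps;
    addInfo.ppsStdCount = 1;  // number of entries in pPpsStd
    addInfo.pPpsStd     = pPps;
    return addInfo;
  }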
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoEncodeH264SessionParametersCreateInfoEXT
|
|
{
|
|
using NativeType = VkVideoEncodeH264SessionParametersCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersCreateInfoEXT(uint32_t maxSpsStdCount_ = {}, uint32_t maxPpsStdCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: maxSpsStdCount( maxSpsStdCount_ ), maxPpsStdCount( maxPpsStdCount_ ), pParametersAddInfo( pParametersAddInfo_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersCreateInfoEXT( VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeH264SessionParametersCreateInfoEXT( VkVideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoEncodeH264SessionParametersCreateInfoEXT( *reinterpret_cast<VideoEncodeH264SessionParametersCreateInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoEncodeH264SessionParametersCreateInfoEXT & operator=( VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeH264SessionParametersCreateInfoEXT & operator=( VkVideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT & setMaxSpsStdCount( uint32_t maxSpsStdCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxSpsStdCount = maxSpsStdCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT & setMaxPpsStdCount( uint32_t maxPpsStdCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxPpsStdCount = maxPpsStdCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT & setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pParametersAddInfo = pParametersAddInfo_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoEncodeH264SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoEncodeH264SessionParametersCreateInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoEncodeH264SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoEncodeH264SessionParametersCreateInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maxSpsStdCount, maxPpsStdCount, pParametersAddInfo );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoEncodeH264SessionParametersCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( maxSpsStdCount == rhs.maxSpsStdCount )
|
|
&& ( maxPpsStdCount == rhs.maxPpsStdCount )
|
|
&& ( pParametersAddInfo == rhs.pParametersAddInfo );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT;
|
|
const void * pNext = {};
|
|
uint32_t maxSpsStdCount = {};
|
|
uint32_t maxPpsStdCount = {};
|
|
const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT * pParametersAddInfo = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT ) == sizeof( VkVideoEncodeH264SessionParametersCreateInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT>::value, "VideoEncodeH264SessionParametersCreateInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT>
|
|
{
|
|
using Type = VideoEncodeH264SessionParametersCreateInfoEXT;
|
|
};
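
  // Usage sketch: a hypothetical helper (not part of the generated API); the capacity limits are
  // illustrative and pParametersAddInfo may be null if no SPS/PPS are added at creation time.
  VULKAN_HPP_INLINE VideoEncodeH264SessionParametersCreateInfoEXT exampleVideoEncodeH264SessionParametersCreateInfo(
    const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT * pParametersAddInfo ) VULKAN_HPP_NOEXCEPT
  {
    VideoEncodeH264SessionParametersCreateInfoEXT parametersCreateInfo;
    parametersCreateInfo.maxSpsStdCount     = 4;                   // capacity for SPS entries in the parameters object
    parametersCreateInfo.maxPpsStdCount     = 4;                   // capacity for PPS entries in the parameters object
    parametersCreateInfo.pParametersAddInfo = pParametersAddInfo;  // initial SPS/PPS to add, or nullptr
    return parametersCreateInfo;
  }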
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoEncodeH264VclFrameInfoEXT
|
|
{
|
|
using NativeType = VkVideoEncodeH264VclFrameInfoEXT;
|
|
|
|
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264VclFrameInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH264VclFrameInfoEXT(const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsEXT * pReferenceFinalLists_ = {}, uint32_t naluSliceEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT * pNaluSliceEntries_ = {}, const StdVideoEncodeH264PictureInfo * pCurrentPictureInfo_ = {}) VULKAN_HPP_NOEXCEPT
    : pReferenceFinalLists( pReferenceFinalLists_ ), naluSliceEntryCount( naluSliceEntryCount_ ), pNaluSliceEntries( pNaluSliceEntries_ ), pCurrentPictureInfo( pCurrentPictureInfo_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoEncodeH264VclFrameInfoEXT( VideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264VclFrameInfoEXT( VkVideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    : VideoEncodeH264VclFrameInfoEXT( *reinterpret_cast<VideoEncodeH264VclFrameInfoEXT const *>( &rhs ) )
    {}

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH264VclFrameInfoEXT( const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsEXT * pReferenceFinalLists_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT> const & naluSliceEntries_, const StdVideoEncodeH264PictureInfo * pCurrentPictureInfo_ = {} )
    : pReferenceFinalLists( pReferenceFinalLists_ ), naluSliceEntryCount( static_cast<uint32_t>( naluSliceEntries_.size() ) ), pNaluSliceEntries( naluSliceEntries_.data() ), pCurrentPictureInfo( pCurrentPictureInfo_ )
    {}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoEncodeH264VclFrameInfoEXT & operator=( VideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264VclFrameInfoEXT & operator=( VkVideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264VclFrameInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & setPReferenceFinalLists( const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsEXT * pReferenceFinalLists_ ) VULKAN_HPP_NOEXCEPT
    {
      pReferenceFinalLists = pReferenceFinalLists_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & setNaluSliceEntryCount( uint32_t naluSliceEntryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      naluSliceEntryCount = naluSliceEntryCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & setPNaluSliceEntries( const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT * pNaluSliceEntries_ ) VULKAN_HPP_NOEXCEPT
    {
      pNaluSliceEntries = pNaluSliceEntries_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH264VclFrameInfoEXT & setNaluSliceEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT> const & naluSliceEntries_ ) VULKAN_HPP_NOEXCEPT
    {
      naluSliceEntryCount = static_cast<uint32_t>( naluSliceEntries_.size() );
      pNaluSliceEntries = naluSliceEntries_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & setPCurrentPictureInfo( const StdVideoEncodeH264PictureInfo * pCurrentPictureInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pCurrentPictureInfo = pCurrentPictureInfo_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkVideoEncodeH264VclFrameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264VclFrameInfoEXT*>( this );
    }

    explicit operator VkVideoEncodeH264VclFrameInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264VclFrameInfoEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsEXT * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT * const &, const StdVideoEncodeH264PictureInfo * const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pReferenceFinalLists, naluSliceEntryCount, pNaluSliceEntries, pCurrentPictureInfo );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( VideoEncodeH264VclFrameInfoEXT const & ) const = default;
#else
    bool operator==( VideoEncodeH264VclFrameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( pReferenceFinalLists == rhs.pReferenceFinalLists )
          && ( naluSliceEntryCount == rhs.naluSliceEntryCount )
          && ( pNaluSliceEntries == rhs.pNaluSliceEntries )
          && ( pCurrentPictureInfo == rhs.pCurrentPictureInfo );
#endif
    }

    bool operator!=( VideoEncodeH264VclFrameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264VclFrameInfoEXT;
    const void * pNext = {};
    const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsEXT * pReferenceFinalLists = {};
    uint32_t naluSliceEntryCount = {};
    const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT * pNaluSliceEntries = {};
    const StdVideoEncodeH264PictureInfo * pCurrentPictureInfo = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264VclFrameInfoEXT ) == sizeof( VkVideoEncodeH264VclFrameInfoEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264VclFrameInfoEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264VclFrameInfoEXT>::value, "VideoEncodeH264VclFrameInfoEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264VclFrameInfoEXT>
  {
    using Type = VideoEncodeH264VclFrameInfoEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
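
  // Usage sketch (editorial comment, not generated from the registry): the enhanced-mode
  // constructor above derives naluSliceEntryCount and pNaluSliceEntries from a single
  // ArrayProxyNoTemporaries argument. Assuming `refLists`, `slices` and `stdPictureInfo`
  // are caller-owned objects that stay alive for the duration of the encode call:
  //
  //   std::array<VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT, 1> slices = { /* ... */ };
  //   VULKAN_HPP_NAMESPACE::VideoEncodeH264VclFrameInfoEXT frameInfo( &refLists, slices, &stdPictureInfo );
  //
  // The individual setters return *this, so the same structure can also be built by chaining
  // setPReferenceFinalLists, setNaluSliceEntries and setPCurrentPictureInfo.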

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeH265CapabilitiesEXT
  {
    using NativeType = VkVideoEncodeH265CapabilitiesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265CapabilitiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT(VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265InputModeFlagsEXT inputModeFlags_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265OutputModeFlagsEXT outputModeFlags_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsEXT ctbSizes_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagsEXT transformBlockSizes_ = {}, uint8_t maxPPictureL0ReferenceCount_ = {}, uint8_t maxBPictureL0ReferenceCount_ = {}, uint8_t maxL1ReferenceCount_ = {}, uint8_t maxSubLayersCount_ = {}, uint8_t minLog2MinLumaCodingBlockSizeMinus3_ = {}, uint8_t maxLog2MinLumaCodingBlockSizeMinus3_ = {}, uint8_t minLog2MinLumaTransformBlockSizeMinus2_ = {}, uint8_t maxLog2MinLumaTransformBlockSizeMinus2_ = {}, uint8_t minMaxTransformHierarchyDepthInter_ = {}, uint8_t maxMaxTransformHierarchyDepthInter_ = {}, uint8_t minMaxTransformHierarchyDepthIntra_ = {}, uint8_t maxMaxTransformHierarchyDepthIntra_ = {}, uint8_t maxDiffCuQpDeltaDepth_ = {}, uint8_t minMaxNumMergeCand_ = {}, uint8_t maxMaxNumMergeCand_ = {}, VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion_ = {}) VULKAN_HPP_NOEXCEPT
    : flags( flags_ ), inputModeFlags( inputModeFlags_ ), outputModeFlags( outputModeFlags_ ), ctbSizes( ctbSizes_ ), transformBlockSizes( transformBlockSizes_ ), maxPPictureL0ReferenceCount( maxPPictureL0ReferenceCount_ ), maxBPictureL0ReferenceCount( maxBPictureL0ReferenceCount_ ), maxL1ReferenceCount( maxL1ReferenceCount_ ), maxSubLayersCount( maxSubLayersCount_ ), minLog2MinLumaCodingBlockSizeMinus3( minLog2MinLumaCodingBlockSizeMinus3_ ), maxLog2MinLumaCodingBlockSizeMinus3( maxLog2MinLumaCodingBlockSizeMinus3_ ), minLog2MinLumaTransformBlockSizeMinus2( minLog2MinLumaTransformBlockSizeMinus2_ ), maxLog2MinLumaTransformBlockSizeMinus2( maxLog2MinLumaTransformBlockSizeMinus2_ ), minMaxTransformHierarchyDepthInter( minMaxTransformHierarchyDepthInter_ ), maxMaxTransformHierarchyDepthInter( maxMaxTransformHierarchyDepthInter_ ), minMaxTransformHierarchyDepthIntra( minMaxTransformHierarchyDepthIntra_ ), maxMaxTransformHierarchyDepthIntra( maxMaxTransformHierarchyDepthIntra_ ), maxDiffCuQpDeltaDepth( maxDiffCuQpDeltaDepth_ ), minMaxNumMergeCand( minMaxNumMergeCand_ ), maxMaxNumMergeCand( maxMaxNumMergeCand_ ), stdExtensionVersion( stdExtensionVersion_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT( VideoEncodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH265CapabilitiesEXT( VkVideoEncodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    : VideoEncodeH265CapabilitiesEXT( *reinterpret_cast<VideoEncodeH265CapabilitiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoEncodeH265CapabilitiesEXT & operator=( VideoEncodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH265CapabilitiesEXT & operator=( VkVideoEncodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & setInputModeFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH265InputModeFlagsEXT inputModeFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      inputModeFlags = inputModeFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & setOutputModeFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH265OutputModeFlagsEXT outputModeFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      outputModeFlags = outputModeFlags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & setCtbSizes( VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsEXT ctbSizes_ ) VULKAN_HPP_NOEXCEPT
    {
      ctbSizes = ctbSizes_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & setTransformBlockSizes( VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagsEXT transformBlockSizes_ ) VULKAN_HPP_NOEXCEPT
    {
      transformBlockSizes = transformBlockSizes_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & setMaxPPictureL0ReferenceCount( uint8_t maxPPictureL0ReferenceCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxPPictureL0ReferenceCount = maxPPictureL0ReferenceCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & setMaxBPictureL0ReferenceCount( uint8_t maxBPictureL0ReferenceCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxBPictureL0ReferenceCount = maxBPictureL0ReferenceCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & setMaxL1ReferenceCount( uint8_t maxL1ReferenceCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxL1ReferenceCount = maxL1ReferenceCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & setMaxSubLayersCount( uint8_t maxSubLayersCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxSubLayersCount = maxSubLayersCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & setMinLog2MinLumaCodingBlockSizeMinus3( uint8_t minLog2MinLumaCodingBlockSizeMinus3_ ) VULKAN_HPP_NOEXCEPT
    {
      minLog2MinLumaCodingBlockSizeMinus3 = minLog2MinLumaCodingBlockSizeMinus3_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & setMaxLog2MinLumaCodingBlockSizeMinus3( uint8_t maxLog2MinLumaCodingBlockSizeMinus3_ ) VULKAN_HPP_NOEXCEPT
    {
      maxLog2MinLumaCodingBlockSizeMinus3 = maxLog2MinLumaCodingBlockSizeMinus3_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & setMinLog2MinLumaTransformBlockSizeMinus2( uint8_t minLog2MinLumaTransformBlockSizeMinus2_ ) VULKAN_HPP_NOEXCEPT
    {
      minLog2MinLumaTransformBlockSizeMinus2 = minLog2MinLumaTransformBlockSizeMinus2_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & setMaxLog2MinLumaTransformBlockSizeMinus2( uint8_t maxLog2MinLumaTransformBlockSizeMinus2_ ) VULKAN_HPP_NOEXCEPT
    {
      maxLog2MinLumaTransformBlockSizeMinus2 = maxLog2MinLumaTransformBlockSizeMinus2_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & setMinMaxTransformHierarchyDepthInter( uint8_t minMaxTransformHierarchyDepthInter_ ) VULKAN_HPP_NOEXCEPT
    {
      minMaxTransformHierarchyDepthInter = minMaxTransformHierarchyDepthInter_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & setMaxMaxTransformHierarchyDepthInter( uint8_t maxMaxTransformHierarchyDepthInter_ ) VULKAN_HPP_NOEXCEPT
    {
      maxMaxTransformHierarchyDepthInter = maxMaxTransformHierarchyDepthInter_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & setMinMaxTransformHierarchyDepthIntra( uint8_t minMaxTransformHierarchyDepthIntra_ ) VULKAN_HPP_NOEXCEPT
    {
      minMaxTransformHierarchyDepthIntra = minMaxTransformHierarchyDepthIntra_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & setMaxMaxTransformHierarchyDepthIntra( uint8_t maxMaxTransformHierarchyDepthIntra_ ) VULKAN_HPP_NOEXCEPT
    {
      maxMaxTransformHierarchyDepthIntra = maxMaxTransformHierarchyDepthIntra_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & setMaxDiffCuQpDeltaDepth( uint8_t maxDiffCuQpDeltaDepth_ ) VULKAN_HPP_NOEXCEPT
    {
      maxDiffCuQpDeltaDepth = maxDiffCuQpDeltaDepth_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & setMinMaxNumMergeCand( uint8_t minMaxNumMergeCand_ ) VULKAN_HPP_NOEXCEPT
    {
      minMaxNumMergeCand = minMaxNumMergeCand_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & setMaxMaxNumMergeCand( uint8_t maxMaxNumMergeCand_ ) VULKAN_HPP_NOEXCEPT
    {
      maxMaxNumMergeCand = maxMaxNumMergeCand_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265CapabilitiesEXT & setStdExtensionVersion( VULKAN_HPP_NAMESPACE::ExtensionProperties const & stdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      stdExtensionVersion = stdExtensionVersion_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkVideoEncodeH265CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265CapabilitiesEXT*>( this );
    }

    explicit operator VkVideoEncodeH265CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265CapabilitiesEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsEXT const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265InputModeFlagsEXT const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265OutputModeFlagsEXT const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsEXT const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagsEXT const &, uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &, uint8_t const &, VULKAN_HPP_NAMESPACE::ExtensionProperties const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, inputModeFlags, outputModeFlags, ctbSizes, transformBlockSizes, maxPPictureL0ReferenceCount, maxBPictureL0ReferenceCount, maxL1ReferenceCount, maxSubLayersCount, minLog2MinLumaCodingBlockSizeMinus3, maxLog2MinLumaCodingBlockSizeMinus3, minLog2MinLumaTransformBlockSizeMinus2, maxLog2MinLumaTransformBlockSizeMinus2, minMaxTransformHierarchyDepthInter, maxMaxTransformHierarchyDepthInter, minMaxTransformHierarchyDepthIntra, maxMaxTransformHierarchyDepthIntra, maxDiffCuQpDeltaDepth, minMaxNumMergeCand, maxMaxNumMergeCand, stdExtensionVersion );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( VideoEncodeH265CapabilitiesEXT const & ) const = default;
#else
    bool operator==( VideoEncodeH265CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags )
          && ( inputModeFlags == rhs.inputModeFlags )
          && ( outputModeFlags == rhs.outputModeFlags )
          && ( ctbSizes == rhs.ctbSizes )
          && ( transformBlockSizes == rhs.transformBlockSizes )
          && ( maxPPictureL0ReferenceCount == rhs.maxPPictureL0ReferenceCount )
          && ( maxBPictureL0ReferenceCount == rhs.maxBPictureL0ReferenceCount )
          && ( maxL1ReferenceCount == rhs.maxL1ReferenceCount )
          && ( maxSubLayersCount == rhs.maxSubLayersCount )
          && ( minLog2MinLumaCodingBlockSizeMinus3 == rhs.minLog2MinLumaCodingBlockSizeMinus3 )
          && ( maxLog2MinLumaCodingBlockSizeMinus3 == rhs.maxLog2MinLumaCodingBlockSizeMinus3 )
          && ( minLog2MinLumaTransformBlockSizeMinus2 == rhs.minLog2MinLumaTransformBlockSizeMinus2 )
          && ( maxLog2MinLumaTransformBlockSizeMinus2 == rhs.maxLog2MinLumaTransformBlockSizeMinus2 )
          && ( minMaxTransformHierarchyDepthInter == rhs.minMaxTransformHierarchyDepthInter )
          && ( maxMaxTransformHierarchyDepthInter == rhs.maxMaxTransformHierarchyDepthInter )
          && ( minMaxTransformHierarchyDepthIntra == rhs.minMaxTransformHierarchyDepthIntra )
          && ( maxMaxTransformHierarchyDepthIntra == rhs.maxMaxTransformHierarchyDepthIntra )
          && ( maxDiffCuQpDeltaDepth == rhs.maxDiffCuQpDeltaDepth )
          && ( minMaxNumMergeCand == rhs.minMaxNumMergeCand )
          && ( maxMaxNumMergeCand == rhs.maxMaxNumMergeCand )
          && ( stdExtensionVersion == rhs.stdExtensionVersion );
#endif
    }

    bool operator!=( VideoEncodeH265CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265CapabilitiesEXT;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsEXT flags = {};
    VULKAN_HPP_NAMESPACE::VideoEncodeH265InputModeFlagsEXT inputModeFlags = {};
    VULKAN_HPP_NAMESPACE::VideoEncodeH265OutputModeFlagsEXT outputModeFlags = {};
    VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsEXT ctbSizes = {};
    VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagsEXT transformBlockSizes = {};
    uint8_t maxPPictureL0ReferenceCount = {};
    uint8_t maxBPictureL0ReferenceCount = {};
    uint8_t maxL1ReferenceCount = {};
    uint8_t maxSubLayersCount = {};
    uint8_t minLog2MinLumaCodingBlockSizeMinus3 = {};
    uint8_t maxLog2MinLumaCodingBlockSizeMinus3 = {};
    uint8_t minLog2MinLumaTransformBlockSizeMinus2 = {};
    uint8_t maxLog2MinLumaTransformBlockSizeMinus2 = {};
    uint8_t minMaxTransformHierarchyDepthInter = {};
    uint8_t maxMaxTransformHierarchyDepthInter = {};
    uint8_t minMaxTransformHierarchyDepthIntra = {};
    uint8_t maxMaxTransformHierarchyDepthIntra = {};
    uint8_t maxDiffCuQpDeltaDepth = {};
    uint8_t minMaxNumMergeCand = {};
    uint8_t maxMaxNumMergeCand = {};
    VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT ) == sizeof( VkVideoEncodeH265CapabilitiesEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT>::value, "VideoEncodeH265CapabilitiesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH265CapabilitiesEXT>
  {
    using Type = VideoEncodeH265CapabilitiesEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
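
  // Editorial note (not part of the generated header): VideoEncodeH265CapabilitiesEXT is an
  // output structure. An application would normally default-construct it (sType is preset,
  // everything else zero-initialized) and chain it into the pNext chain of the generic video
  // capability query rather than filling the fields itself. A minimal sketch, assuming the
  // beta video extensions and a VideoCapabilitiesKHR query structure are available:
  //
  //   VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT h265Caps;
  //   VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR videoCaps;
  //   videoCaps.pNext = &h265Caps;   // the implementation fills h265Caps during the query
  //
  // After the query, fields such as ctbSizes, transformBlockSizes and maxSubLayersCount
  // describe the encoder limits.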

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeH265DpbSlotInfoEXT
  {
    using NativeType = VkVideoEncodeH265DpbSlotInfoEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265DpbSlotInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH265DpbSlotInfoEXT(int8_t slotIndex_ = {}, const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo_ = {}) VULKAN_HPP_NOEXCEPT
    : slotIndex( slotIndex_ ), pStdReferenceInfo( pStdReferenceInfo_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoEncodeH265DpbSlotInfoEXT( VideoEncodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH265DpbSlotInfoEXT( VkVideoEncodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    : VideoEncodeH265DpbSlotInfoEXT( *reinterpret_cast<VideoEncodeH265DpbSlotInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoEncodeH265DpbSlotInfoEXT & operator=( VideoEncodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH265DpbSlotInfoEXT & operator=( VkVideoEncodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265DpbSlotInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265DpbSlotInfoEXT & setSlotIndex( int8_t slotIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      slotIndex = slotIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265DpbSlotInfoEXT & setPStdReferenceInfo( const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdReferenceInfo = pStdReferenceInfo_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkVideoEncodeH265DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265DpbSlotInfoEXT*>( this );
    }

    explicit operator VkVideoEncodeH265DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265DpbSlotInfoEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, int8_t const &, const StdVideoEncodeH265ReferenceInfo * const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, slotIndex, pStdReferenceInfo );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( VideoEncodeH265DpbSlotInfoEXT const & ) const = default;
#else
    bool operator==( VideoEncodeH265DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( slotIndex == rhs.slotIndex )
          && ( pStdReferenceInfo == rhs.pStdReferenceInfo );
#endif
    }

    bool operator!=( VideoEncodeH265DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265DpbSlotInfoEXT;
    const void * pNext = {};
    int8_t slotIndex = {};
    const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT ) == sizeof( VkVideoEncodeH265DpbSlotInfoEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT>::value, "VideoEncodeH265DpbSlotInfoEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH265DpbSlotInfoEXT>
  {
    using Type = VideoEncodeH265DpbSlotInfoEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
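
  // Illustrative sketch (editorial, not generated): a DPB slot description simply pairs a slot
  // index with codec-specific reference information. Assuming struct setters are enabled and
  // `stdRefInfo` is a valid StdVideoEncodeH265ReferenceInfo kept alive by the caller:
  //
  //   VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT dpbSlot;
  //   dpbSlot.setSlotIndex( 0 ).setPStdReferenceInfo( &stdRefInfo );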

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeH265EmitPictureParametersEXT
  {
    using NativeType = VkVideoEncodeH265EmitPictureParametersEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265EmitPictureParametersEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH265EmitPictureParametersEXT(uint8_t vpsId_ = {}, uint8_t spsId_ = {}, VULKAN_HPP_NAMESPACE::Bool32 emitVpsEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ = {}, uint32_t ppsIdEntryCount_ = {}, const uint8_t * ppsIdEntries_ = {}) VULKAN_HPP_NOEXCEPT
    : vpsId( vpsId_ ), spsId( spsId_ ), emitVpsEnable( emitVpsEnable_ ), emitSpsEnable( emitSpsEnable_ ), ppsIdEntryCount( ppsIdEntryCount_ ), ppsIdEntries( ppsIdEntries_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoEncodeH265EmitPictureParametersEXT( VideoEncodeH265EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH265EmitPictureParametersEXT( VkVideoEncodeH265EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    : VideoEncodeH265EmitPictureParametersEXT( *reinterpret_cast<VideoEncodeH265EmitPictureParametersEXT const *>( &rhs ) )
    {}

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH265EmitPictureParametersEXT( uint8_t vpsId_, uint8_t spsId_, VULKAN_HPP_NAMESPACE::Bool32 emitVpsEnable_, VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint8_t> const & psIdEntries_ )
    : vpsId( vpsId_ ), spsId( spsId_ ), emitVpsEnable( emitVpsEnable_ ), emitSpsEnable( emitSpsEnable_ ), ppsIdEntryCount( static_cast<uint32_t>( psIdEntries_.size() ) ), ppsIdEntries( psIdEntries_.data() )
    {}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoEncodeH265EmitPictureParametersEXT & operator=( VideoEncodeH265EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH265EmitPictureParametersEXT & operator=( VkVideoEncodeH265EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT & setVpsId( uint8_t vpsId_ ) VULKAN_HPP_NOEXCEPT
    {
      vpsId = vpsId_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT & setSpsId( uint8_t spsId_ ) VULKAN_HPP_NOEXCEPT
    {
      spsId = spsId_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT & setEmitVpsEnable( VULKAN_HPP_NAMESPACE::Bool32 emitVpsEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      emitVpsEnable = emitVpsEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT & setEmitSpsEnable( VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      emitSpsEnable = emitSpsEnable_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT & setPpsIdEntryCount( uint32_t ppsIdEntryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      ppsIdEntryCount = ppsIdEntryCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT & setPpsIdEntries( const uint8_t * ppsIdEntries_ ) VULKAN_HPP_NOEXCEPT
    {
      ppsIdEntries = ppsIdEntries_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH265EmitPictureParametersEXT & setPsIdEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint8_t> const & psIdEntries_ ) VULKAN_HPP_NOEXCEPT
    {
      ppsIdEntryCount = static_cast<uint32_t>( psIdEntries_.size() );
      ppsIdEntries = psIdEntries_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkVideoEncodeH265EmitPictureParametersEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265EmitPictureParametersEXT*>( this );
    }

    explicit operator VkVideoEncodeH265EmitPictureParametersEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265EmitPictureParametersEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint8_t const &, uint8_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, const uint8_t * const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, vpsId, spsId, emitVpsEnable, emitSpsEnable, ppsIdEntryCount, ppsIdEntries );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( VideoEncodeH265EmitPictureParametersEXT const & ) const = default;
#else
    bool operator==( VideoEncodeH265EmitPictureParametersEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( vpsId == rhs.vpsId )
          && ( spsId == rhs.spsId )
          && ( emitVpsEnable == rhs.emitVpsEnable )
          && ( emitSpsEnable == rhs.emitSpsEnable )
          && ( ppsIdEntryCount == rhs.ppsIdEntryCount )
          && ( ppsIdEntries == rhs.ppsIdEntries );
#endif
    }

    bool operator!=( VideoEncodeH265EmitPictureParametersEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265EmitPictureParametersEXT;
    const void * pNext = {};
    uint8_t vpsId = {};
    uint8_t spsId = {};
    VULKAN_HPP_NAMESPACE::Bool32 emitVpsEnable = {};
    VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable = {};
    uint32_t ppsIdEntryCount = {};
    const uint8_t * ppsIdEntries = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersEXT ) == sizeof( VkVideoEncodeH265EmitPictureParametersEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersEXT>::value, "VideoEncodeH265EmitPictureParametersEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH265EmitPictureParametersEXT>
  {
    using Type = VideoEncodeH265EmitPictureParametersEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
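
  // Illustrative sketch (editorial, not generated): note that the enhanced-mode array setter is
  // named setPsIdEntries (the member name ppsIdEntries with its pointer prefix stripped by the
  // generator); it fills ppsIdEntryCount and ppsIdEntries together. Assuming enhanced mode and
  // struct setters are enabled and `ppsIds` is a caller-owned container of uint8_t:
  //
  //   std::array<uint8_t, 2> ppsIds = { 0, 1 };
  //   VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersEXT emit;
  //   emit.setVpsId( 0 ).setSpsId( 0 ).setEmitVpsEnable( VK_TRUE ).setEmitSpsEnable( VK_TRUE ).setPsIdEntries( ppsIds );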

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeH265FrameSizeEXT
  {
    using NativeType = VkVideoEncodeH265FrameSizeEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH265FrameSizeEXT(uint32_t frameISize_ = {}, uint32_t framePSize_ = {}, uint32_t frameBSize_ = {}) VULKAN_HPP_NOEXCEPT
    : frameISize( frameISize_ ), framePSize( framePSize_ ), frameBSize( frameBSize_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoEncodeH265FrameSizeEXT( VideoEncodeH265FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH265FrameSizeEXT( VkVideoEncodeH265FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    : VideoEncodeH265FrameSizeEXT( *reinterpret_cast<VideoEncodeH265FrameSizeEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoEncodeH265FrameSizeEXT & operator=( VideoEncodeH265FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH265FrameSizeEXT & operator=( VkVideoEncodeH265FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeEXT & setFrameISize( uint32_t frameISize_ ) VULKAN_HPP_NOEXCEPT
    {
      frameISize = frameISize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeEXT & setFramePSize( uint32_t framePSize_ ) VULKAN_HPP_NOEXCEPT
    {
      framePSize = framePSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeEXT & setFrameBSize( uint32_t frameBSize_ ) VULKAN_HPP_NOEXCEPT
    {
      frameBSize = frameBSize_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkVideoEncodeH265FrameSizeEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265FrameSizeEXT*>( this );
    }

    explicit operator VkVideoEncodeH265FrameSizeEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265FrameSizeEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( frameISize, framePSize, frameBSize );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( VideoEncodeH265FrameSizeEXT const & ) const = default;
#else
    bool operator==( VideoEncodeH265FrameSizeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( frameISize == rhs.frameISize )
          && ( framePSize == rhs.framePSize )
          && ( frameBSize == rhs.frameBSize );
#endif
    }

    bool operator!=( VideoEncodeH265FrameSizeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t frameISize = {};
    uint32_t framePSize = {};
    uint32_t frameBSize = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT ) == sizeof( VkVideoEncodeH265FrameSizeEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT>::value, "VideoEncodeH265FrameSizeEXT is not nothrow_move_constructible!" );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeH265ReferenceListsEXT
  {
    using NativeType = VkVideoEncodeH265ReferenceListsEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265ReferenceListsEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH265ReferenceListsEXT(uint8_t referenceList0EntryCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList0Entries_ = {}, uint8_t referenceList1EntryCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList1Entries_ = {}, const StdVideoEncodeH265ReferenceModifications * pReferenceModifications_ = {}) VULKAN_HPP_NOEXCEPT
    : referenceList0EntryCount( referenceList0EntryCount_ ), pReferenceList0Entries( pReferenceList0Entries_ ), referenceList1EntryCount( referenceList1EntryCount_ ), pReferenceList1Entries( pReferenceList1Entries_ ), pReferenceModifications( pReferenceModifications_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoEncodeH265ReferenceListsEXT( VideoEncodeH265ReferenceListsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH265ReferenceListsEXT( VkVideoEncodeH265ReferenceListsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    : VideoEncodeH265ReferenceListsEXT( *reinterpret_cast<VideoEncodeH265ReferenceListsEXT const *>( &rhs ) )
    {}

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH265ReferenceListsEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT> const & referenceList0Entries_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT> const & referenceList1Entries_ = {}, const StdVideoEncodeH265ReferenceModifications * pReferenceModifications_ = {} )
    : referenceList0EntryCount( static_cast<uint8_t>( referenceList0Entries_.size() ) ), pReferenceList0Entries( referenceList0Entries_.data() ), referenceList1EntryCount( static_cast<uint8_t>( referenceList1Entries_.size() ) ), pReferenceList1Entries( referenceList1Entries_.data() ), pReferenceModifications( pReferenceModifications_ )
    {}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoEncodeH265ReferenceListsEXT & operator=( VideoEncodeH265ReferenceListsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH265ReferenceListsEXT & operator=( VkVideoEncodeH265ReferenceListsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsEXT & setReferenceList0EntryCount( uint8_t referenceList0EntryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      referenceList0EntryCount = referenceList0EntryCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsEXT & setPReferenceList0Entries( const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList0Entries_ ) VULKAN_HPP_NOEXCEPT
    {
      pReferenceList0Entries = pReferenceList0Entries_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH265ReferenceListsEXT & setReferenceList0Entries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT> const & referenceList0Entries_ ) VULKAN_HPP_NOEXCEPT
    {
      referenceList0EntryCount = static_cast<uint8_t>( referenceList0Entries_.size() );
      pReferenceList0Entries = referenceList0Entries_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsEXT & setReferenceList1EntryCount( uint8_t referenceList1EntryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      referenceList1EntryCount = referenceList1EntryCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsEXT & setPReferenceList1Entries( const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList1Entries_ ) VULKAN_HPP_NOEXCEPT
    {
      pReferenceList1Entries = pReferenceList1Entries_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH265ReferenceListsEXT & setReferenceList1Entries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT> const & referenceList1Entries_ ) VULKAN_HPP_NOEXCEPT
    {
      referenceList1EntryCount = static_cast<uint8_t>( referenceList1Entries_.size() );
      pReferenceList1Entries = referenceList1Entries_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsEXT & setPReferenceModifications( const StdVideoEncodeH265ReferenceModifications * pReferenceModifications_ ) VULKAN_HPP_NOEXCEPT
    {
      pReferenceModifications = pReferenceModifications_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkVideoEncodeH265ReferenceListsEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265ReferenceListsEXT*>( this );
    }

    explicit operator VkVideoEncodeH265ReferenceListsEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265ReferenceListsEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint8_t const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * const &, uint8_t const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * const &, const StdVideoEncodeH265ReferenceModifications * const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, referenceList0EntryCount, pReferenceList0Entries, referenceList1EntryCount, pReferenceList1Entries, pReferenceModifications );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( VideoEncodeH265ReferenceListsEXT const & ) const = default;
#else
    bool operator==( VideoEncodeH265ReferenceListsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( referenceList0EntryCount == rhs.referenceList0EntryCount )
          && ( pReferenceList0Entries == rhs.pReferenceList0Entries )
          && ( referenceList1EntryCount == rhs.referenceList1EntryCount )
          && ( pReferenceList1Entries == rhs.pReferenceList1Entries )
          && ( pReferenceModifications == rhs.pReferenceModifications );
#endif
    }

    bool operator!=( VideoEncodeH265ReferenceListsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265ReferenceListsEXT;
    const void * pNext = {};
    uint8_t referenceList0EntryCount = {};
    const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList0Entries = {};
    uint8_t referenceList1EntryCount = {};
    const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList1Entries = {};
    const StdVideoEncodeH265ReferenceModifications * pReferenceModifications = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT ) == sizeof( VkVideoEncodeH265ReferenceListsEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT>::value, "VideoEncodeH265ReferenceListsEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH265ReferenceListsEXT>
  {
    using Type = VideoEncodeH265ReferenceListsEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
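
  // Illustrative sketch (editorial, not generated): the enhanced-mode constructor derives both
  // entry counts from the ArrayProxy sizes, so the L0/L1 lists and the std reference
  // modifications can be supplied in one expression. Assuming `l0`, `l1` and `stdRefMods` are
  // caller-owned and outlive the encode operation:
  //
  //   VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT refLists( l0, l1, &stdRefMods );
  //
  // Note that the counts are uint8_t, so lists longer than 255 entries would be truncated by
  // the static_cast in the constructor.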

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeH265NaluSliceSegmentEXT
  {
    using NativeType = VkVideoEncodeH265NaluSliceSegmentEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265NaluSliceSegmentEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH265NaluSliceSegmentEXT(uint32_t ctbCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT * pReferenceFinalLists_ = {}, const StdVideoEncodeH265SliceSegmentHeader * pSliceSegmentHeaderStd_ = {}) VULKAN_HPP_NOEXCEPT
    : ctbCount( ctbCount_ ), pReferenceFinalLists( pReferenceFinalLists_ ), pSliceSegmentHeaderStd( pSliceSegmentHeaderStd_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoEncodeH265NaluSliceSegmentEXT( VideoEncodeH265NaluSliceSegmentEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH265NaluSliceSegmentEXT( VkVideoEncodeH265NaluSliceSegmentEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    : VideoEncodeH265NaluSliceSegmentEXT( *reinterpret_cast<VideoEncodeH265NaluSliceSegmentEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoEncodeH265NaluSliceSegmentEXT & operator=( VideoEncodeH265NaluSliceSegmentEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH265NaluSliceSegmentEXT & operator=( VkVideoEncodeH265NaluSliceSegmentEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentEXT & setCtbCount( uint32_t ctbCount_ ) VULKAN_HPP_NOEXCEPT
    {
      ctbCount = ctbCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentEXT & setPReferenceFinalLists( const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT * pReferenceFinalLists_ ) VULKAN_HPP_NOEXCEPT
    {
      pReferenceFinalLists = pReferenceFinalLists_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentEXT & setPSliceSegmentHeaderStd( const StdVideoEncodeH265SliceSegmentHeader * pSliceSegmentHeaderStd_ ) VULKAN_HPP_NOEXCEPT
    {
      pSliceSegmentHeaderStd = pSliceSegmentHeaderStd_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkVideoEncodeH265NaluSliceSegmentEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265NaluSliceSegmentEXT*>( this );
    }

    explicit operator VkVideoEncodeH265NaluSliceSegmentEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265NaluSliceSegmentEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT * const &, const StdVideoEncodeH265SliceSegmentHeader * const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, ctbCount, pReferenceFinalLists, pSliceSegmentHeaderStd );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( VideoEncodeH265NaluSliceSegmentEXT const & ) const = default;
#else
    bool operator==( VideoEncodeH265NaluSliceSegmentEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( ctbCount == rhs.ctbCount )
          && ( pReferenceFinalLists == rhs.pReferenceFinalLists )
          && ( pSliceSegmentHeaderStd == rhs.pSliceSegmentHeaderStd );
#endif
    }

    bool operator!=( VideoEncodeH265NaluSliceSegmentEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265NaluSliceSegmentEXT;
    const void * pNext = {};
    uint32_t ctbCount = {};
    const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT * pReferenceFinalLists = {};
    const StdVideoEncodeH265SliceSegmentHeader * pSliceSegmentHeaderStd = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentEXT ) == sizeof( VkVideoEncodeH265NaluSliceSegmentEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentEXT>::value, "VideoEncodeH265NaluSliceSegmentEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH265NaluSliceSegmentEXT>
  {
    using Type = VideoEncodeH265NaluSliceSegmentEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
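
  // Illustrative sketch (editorial, not generated): each slice segment names its CTB count, the
  // finalized reference lists and the std slice segment header. Assuming `refLists` and
  // `stdSliceHeader` are valid for the duration of the encode call:
  //
  //   VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentEXT segment;
  //   segment.setCtbCount( 120 ).setPReferenceFinalLists( &refLists ).setPSliceSegmentHeaderStd( &stdSliceHeader );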

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeH265ProfileEXT
  {
    using NativeType = VkVideoEncodeH265ProfileEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265ProfileEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH265ProfileEXT(StdVideoH265ProfileIdc stdProfileIdc_ = {}) VULKAN_HPP_NOEXCEPT
    : stdProfileIdc( stdProfileIdc_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoEncodeH265ProfileEXT( VideoEncodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH265ProfileEXT( VkVideoEncodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    : VideoEncodeH265ProfileEXT( *reinterpret_cast<VideoEncodeH265ProfileEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoEncodeH265ProfileEXT & operator=( VideoEncodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH265ProfileEXT & operator=( VkVideoEncodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ProfileEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ProfileEXT & setStdProfileIdc( StdVideoH265ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
    {
      stdProfileIdc = stdProfileIdc_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkVideoEncodeH265ProfileEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265ProfileEXT*>( this );
    }

    explicit operator VkVideoEncodeH265ProfileEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265ProfileEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, StdVideoH265ProfileIdc const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stdProfileIdc );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    std::strong_ordering operator<=>( VideoEncodeH265ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
      if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

      return std::strong_ordering::equivalent;
    }
#endif

    bool operator==( VideoEncodeH265ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ) == 0 );
    }

    bool operator!=( VideoEncodeH265ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265ProfileEXT;
    const void * pNext = {};
    StdVideoH265ProfileIdc stdProfileIdc = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileEXT ) == sizeof( VkVideoEncodeH265ProfileEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileEXT>::value, "VideoEncodeH265ProfileEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH265ProfileEXT>
  {
    using Type = VideoEncodeH265ProfileEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeH265QpEXT
  {
    using NativeType = VkVideoEncodeH265QpEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH265QpEXT(int32_t qpI_ = {}, int32_t qpP_ = {}, int32_t qpB_ = {}) VULKAN_HPP_NOEXCEPT
    : qpI( qpI_ ), qpP( qpP_ ), qpB( qpB_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoEncodeH265QpEXT( VideoEncodeH265QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH265QpEXT( VkVideoEncodeH265QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    : VideoEncodeH265QpEXT( *reinterpret_cast<VideoEncodeH265QpEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoEncodeH265QpEXT & operator=( VideoEncodeH265QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH265QpEXT & operator=( VkVideoEncodeH265QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpEXT & setQpI( int32_t qpI_ ) VULKAN_HPP_NOEXCEPT
    {
      qpI = qpI_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpEXT & setQpP( int32_t qpP_ ) VULKAN_HPP_NOEXCEPT
    {
      qpP = qpP_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpEXT & setQpB( int32_t qpB_ ) VULKAN_HPP_NOEXCEPT
    {
      qpB = qpB_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkVideoEncodeH265QpEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265QpEXT*>( this );
    }

    explicit operator VkVideoEncodeH265QpEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265QpEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<int32_t const &, int32_t const &, int32_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( qpI, qpP, qpB );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( VideoEncodeH265QpEXT const & ) const = default;
#else
    bool operator==( VideoEncodeH265QpEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( qpI == rhs.qpI )
          && ( qpP == rhs.qpP )
          && ( qpB == rhs.qpB );
#endif
    }

    bool operator!=( VideoEncodeH265QpEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    int32_t qpI = {};
    int32_t qpP = {};
    int32_t qpB = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT ) == sizeof( VkVideoEncodeH265QpEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT>::value, "VideoEncodeH265QpEXT is not nothrow_move_constructible!" );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
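
  // Illustrative sketch (editorial, not generated): VideoEncodeH265QpEXT carries per-picture-type
  // QP values and has no sType/pNext, so it is typically embedded in the H.265 rate-control
  // layer structure rather than chained. For example, QP 26/28/30 for I/P/B pictures:
  //
  //   VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT constantQp;
  //   constantQp.setQpI( 26 ).setQpP( 28 ).setQpB( 30 );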

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeH265RateControlInfoEXT
  {
    using NativeType = VkVideoEncodeH265RateControlInfoEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265RateControlInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlInfoEXT(uint32_t gopFrameCount_ = {}, uint32_t idrPeriod_ = {}, uint32_t consecutiveBFrameCount_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureFlagBitsEXT rateControlStructure_ = VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureFlagBitsEXT::eUnknown, uint8_t subLayerCount_ = {}) VULKAN_HPP_NOEXCEPT
    : gopFrameCount( gopFrameCount_ ), idrPeriod( idrPeriod_ ), consecutiveBFrameCount( consecutiveBFrameCount_ ), rateControlStructure( rateControlStructure_ ), subLayerCount( subLayerCount_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlInfoEXT( VideoEncodeH265RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH265RateControlInfoEXT( VkVideoEncodeH265RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    : VideoEncodeH265RateControlInfoEXT( *reinterpret_cast<VideoEncodeH265RateControlInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoEncodeH265RateControlInfoEXT & operator=( VideoEncodeH265RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH265RateControlInfoEXT & operator=( VkVideoEncodeH265RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & setGopFrameCount( uint32_t gopFrameCount_ ) VULKAN_HPP_NOEXCEPT
    {
      gopFrameCount = gopFrameCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & setIdrPeriod( uint32_t idrPeriod_ ) VULKAN_HPP_NOEXCEPT
    {
      idrPeriod = idrPeriod_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & setConsecutiveBFrameCount( uint32_t consecutiveBFrameCount_ ) VULKAN_HPP_NOEXCEPT
    {
      consecutiveBFrameCount = consecutiveBFrameCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & setRateControlStructure( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureFlagBitsEXT rateControlStructure_ ) VULKAN_HPP_NOEXCEPT
    {
      rateControlStructure = rateControlStructure_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & setSubLayerCount( uint8_t subLayerCount_ ) VULKAN_HPP_NOEXCEPT
    {
      subLayerCount = subLayerCount_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkVideoEncodeH265RateControlInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265RateControlInfoEXT*>( this );
    }

    explicit operator VkVideoEncodeH265RateControlInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265RateControlInfoEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureFlagBitsEXT const &, uint8_t const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, gopFrameCount, idrPeriod, consecutiveBFrameCount, rateControlStructure, subLayerCount );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( VideoEncodeH265RateControlInfoEXT const & ) const = default;
#else
    bool operator==( VideoEncodeH265RateControlInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( gopFrameCount == rhs.gopFrameCount )
          && ( idrPeriod == rhs.idrPeriod )
          && ( consecutiveBFrameCount == rhs.consecutiveBFrameCount )
          && ( rateControlStructure == rhs.rateControlStructure )
          && ( subLayerCount == rhs.subLayerCount );
#endif
    }

    bool operator!=( VideoEncodeH265RateControlInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265RateControlInfoEXT;
    const void * pNext = {};
    uint32_t gopFrameCount = {};
    uint32_t idrPeriod = {};
    uint32_t consecutiveBFrameCount = {};
VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureFlagBitsEXT rateControlStructure = VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureFlagBitsEXT::eUnknown;
|
|
uint8_t subLayerCount = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT ) == sizeof( VkVideoEncodeH265RateControlInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT>::value, "VideoEncodeH265RateControlInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoEncodeH265RateControlInfoEXT>
|
|
{
|
|
using Type = VideoEncodeH265RateControlInfoEXT;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
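
  // Illustrative usage sketch (not part of the generated header; the GOP values are hypothetical):
  // describing the intended GOP layout for H.265 rate control. In this beta API revision the
  // structure is normally combined with the codec-independent rate-control structures
  // (VideoEncodeRateControlInfoKHR / VideoEncodeRateControlLayerInfoKHR, defined later in this
  // header) through the usual pNext chaining; consult the VK_EXT_video_encode_h265 specification
  // for the exact chain.
  //
  //   VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT h265Rc;
  //   h265Rc.setGopFrameCount( 16 )
  //         .setIdrPeriod( 16 )
  //         .setConsecutiveBFrameCount( 2 )
  //         .setSubLayerCount( 1 );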
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoEncodeH265RateControlLayerInfoEXT
|
|
{
|
|
using NativeType = VkVideoEncodeH265RateControlLayerInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265RateControlLayerInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlLayerInfoEXT(uint8_t temporalId_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT initialRcQp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT minQp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT maxQp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT maxFrameSize_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: temporalId( temporalId_ ), useInitialRcQp( useInitialRcQp_ ), initialRcQp( initialRcQp_ ), useMinQp( useMinQp_ ), minQp( minQp_ ), useMaxQp( useMaxQp_ ), maxQp( maxQp_ ), useMaxFrameSize( useMaxFrameSize_ ), maxFrameSize( maxFrameSize_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlLayerInfoEXT( VideoEncodeH265RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeH265RateControlLayerInfoEXT( VkVideoEncodeH265RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoEncodeH265RateControlLayerInfoEXT( *reinterpret_cast<VideoEncodeH265RateControlLayerInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoEncodeH265RateControlLayerInfoEXT & operator=( VideoEncodeH265RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeH265RateControlLayerInfoEXT & operator=( VkVideoEncodeH265RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setTemporalId( uint8_t temporalId_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
temporalId = temporalId_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setUseInitialRcQp( VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
useInitialRcQp = useInitialRcQp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setInitialRcQp( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const & initialRcQp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
initialRcQp = initialRcQp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setUseMinQp( VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
useMinQp = useMinQp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setMinQp( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const & minQp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
minQp = minQp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setUseMaxQp( VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
useMaxQp = useMaxQp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setMaxQp( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const & maxQp_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxQp = maxQp_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setUseMaxFrameSize( VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
useMaxFrameSize = useMaxFrameSize_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setMaxFrameSize( VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT const & maxFrameSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxFrameSize = maxFrameSize_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoEncodeH265RateControlLayerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoEncodeH265RateControlLayerInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoEncodeH265RateControlLayerInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoEncodeH265RateControlLayerInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint8_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, temporalId, useInitialRcQp, initialRcQp, useMinQp, minQp, useMaxQp, maxQp, useMaxFrameSize, maxFrameSize );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoEncodeH265RateControlLayerInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( VideoEncodeH265RateControlLayerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( temporalId == rhs.temporalId )
|
|
&& ( useInitialRcQp == rhs.useInitialRcQp )
|
|
&& ( initialRcQp == rhs.initialRcQp )
|
|
&& ( useMinQp == rhs.useMinQp )
|
|
&& ( minQp == rhs.minQp )
|
|
&& ( useMaxQp == rhs.useMaxQp )
|
|
&& ( maxQp == rhs.maxQp )
|
|
&& ( useMaxFrameSize == rhs.useMaxFrameSize )
|
|
&& ( maxFrameSize == rhs.maxFrameSize );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoEncodeH265RateControlLayerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265RateControlLayerInfoEXT;
|
|
const void * pNext = {};
|
|
uint8_t temporalId = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp = {};
|
|
VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT initialRcQp = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 useMinQp = {};
|
|
VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT minQp = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 useMaxQp = {};
|
|
VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT maxQp = {};
|
|
VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize = {};
|
|
VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT maxFrameSize = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT ) == sizeof( VkVideoEncodeH265RateControlLayerInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT>::value, "VideoEncodeH265RateControlLayerInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoEncodeH265RateControlLayerInfoEXT>
|
|
{
|
|
using Type = VideoEncodeH265RateControlLayerInfoEXT;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
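
  // Illustrative usage sketch (not part of the generated header; values are hypothetical):
  // per-layer H.265 rate control with explicit QP bounds. The useMinQp / useMaxQp booleans
  // indicate whether the paired VideoEncodeH265QpEXT values are taken into account.
  //
  //   VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT h265Layer;
  //   h265Layer.setTemporalId( 0 )
  //            .setUseMinQp( VK_TRUE ).setMinQp( { 18, 18, 18 } )
  //            .setUseMaxQp( VK_TRUE ).setMaxQp( { 42, 42, 42 } );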
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeH265SessionCreateInfoEXT
  {
    using NativeType = VkVideoEncodeH265SessionCreateInfoEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoEXT(VULKAN_HPP_NAMESPACE::VideoEncodeH265CreateFlagsEXT flags_ = {}, const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ = {}) VULKAN_HPP_NOEXCEPT
    : flags( flags_ ), pStdExtensionVersion( pStdExtensionVersion_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoEXT( VideoEncodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH265SessionCreateInfoEXT( VkVideoEncodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH265SessionCreateInfoEXT( *reinterpret_cast<VideoEncodeH265SessionCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoEncodeH265SessionCreateInfoEXT & operator=( VideoEncodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH265SessionCreateInfoEXT & operator=( VkVideoEncodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH265CreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoEXT & setPStdExtensionVersion( const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdExtensionVersion = pStdExtensionVersion_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkVideoEncodeH265SessionCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265SessionCreateInfoEXT*>( this );
    }

    explicit operator VkVideoEncodeH265SessionCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265SessionCreateInfoEXT*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265CreateFlagsEXT const &, const VULKAN_HPP_NAMESPACE::ExtensionProperties * const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, pStdExtensionVersion );
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( VideoEncodeH265SessionCreateInfoEXT const & ) const = default;
#else
    bool operator==( VideoEncodeH265SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#else
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags )
          && ( pStdExtensionVersion == rhs.pStdExtensionVersion );
#endif
    }

    bool operator!=( VideoEncodeH265SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionCreateInfoEXT;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::VideoEncodeH265CreateFlagsEXT flags = {};
    const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion = {};

  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoEXT ) == sizeof( VkVideoEncodeH265SessionCreateInfoEXT ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoEXT>::value, "VideoEncodeH265SessionCreateInfoEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH265SessionCreateInfoEXT>
  {
    using Type = VideoEncodeH265SessionCreateInfoEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoEncodeH265SessionParametersAddInfoEXT
|
|
{
|
|
using NativeType = VkVideoEncodeH265SessionParametersAddInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionParametersAddInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersAddInfoEXT(uint32_t vpsStdCount_ = {}, const StdVideoH265VideoParameterSet * pVpsStd_ = {}, uint32_t spsStdCount_ = {}, const StdVideoH265SequenceParameterSet * pSpsStd_ = {}, uint32_t ppsStdCount_ = {}, const StdVideoH265PictureParameterSet * pPpsStd_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: vpsStdCount( vpsStdCount_ ), pVpsStd( pVpsStd_ ), spsStdCount( spsStdCount_ ), pSpsStd( pSpsStd_ ), ppsStdCount( ppsStdCount_ ), pPpsStd( pPpsStd_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersAddInfoEXT( VideoEncodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeH265SessionParametersAddInfoEXT( VkVideoEncodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoEncodeH265SessionParametersAddInfoEXT( *reinterpret_cast<VideoEncodeH265SessionParametersAddInfoEXT const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
VideoEncodeH265SessionParametersAddInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265VideoParameterSet> const & vpsStd_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & spsStd_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const & ppsStd_ = {} )
|
|
: vpsStdCount( static_cast<uint32_t>( vpsStd_.size() ) ), pVpsStd( vpsStd_.data() ), spsStdCount( static_cast<uint32_t>( spsStd_.size() ) ), pSpsStd( spsStd_.data() ), ppsStdCount( static_cast<uint32_t>( ppsStd_.size() ) ), pPpsStd( ppsStd_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoEncodeH265SessionParametersAddInfoEXT & operator=( VideoEncodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeH265SessionParametersAddInfoEXT & operator=( VkVideoEncodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & setVpsStdCount( uint32_t vpsStdCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
vpsStdCount = vpsStdCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & setPVpsStd( const StdVideoH265VideoParameterSet * pVpsStd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pVpsStd = pVpsStd_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
VideoEncodeH265SessionParametersAddInfoEXT & setVpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265VideoParameterSet> const & vpsStd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
vpsStdCount = static_cast<uint32_t>( vpsStd_.size() );
|
|
pVpsStd = vpsStd_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & setSpsStdCount( uint32_t spsStdCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
spsStdCount = spsStdCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & setPSpsStd( const StdVideoH265SequenceParameterSet * pSpsStd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSpsStd = pSpsStd_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
VideoEncodeH265SessionParametersAddInfoEXT & setSpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & spsStd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
spsStdCount = static_cast<uint32_t>( spsStd_.size() );
|
|
pSpsStd = spsStd_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & setPpsStdCount( uint32_t ppsStdCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
ppsStdCount = ppsStdCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & setPPpsStd( const StdVideoH265PictureParameterSet * pPpsStd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pPpsStd = pPpsStd_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
VideoEncodeH265SessionParametersAddInfoEXT & setPpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const & ppsStd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
ppsStdCount = static_cast<uint32_t>( ppsStd_.size() );
|
|
pPpsStd = ppsStd_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoEncodeH265SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoEncodeH265SessionParametersAddInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoEncodeH265SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoEncodeH265SessionParametersAddInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const StdVideoH265VideoParameterSet * const &, uint32_t const &, const StdVideoH265SequenceParameterSet * const &, uint32_t const &, const StdVideoH265PictureParameterSet * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, vpsStdCount, pVpsStd, spsStdCount, pSpsStd, ppsStdCount, pPpsStd );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoEncodeH265SessionParametersAddInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( VideoEncodeH265SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( vpsStdCount == rhs.vpsStdCount )
|
|
&& ( pVpsStd == rhs.pVpsStd )
|
|
&& ( spsStdCount == rhs.spsStdCount )
|
|
&& ( pSpsStd == rhs.pSpsStd )
|
|
&& ( ppsStdCount == rhs.ppsStdCount )
|
|
&& ( pPpsStd == rhs.pPpsStd );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoEncodeH265SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersAddInfoEXT;
|
|
const void * pNext = {};
|
|
uint32_t vpsStdCount = {};
|
|
const StdVideoH265VideoParameterSet * pVpsStd = {};
|
|
uint32_t spsStdCount = {};
|
|
const StdVideoH265SequenceParameterSet * pSpsStd = {};
|
|
uint32_t ppsStdCount = {};
|
|
const StdVideoH265PictureParameterSet * pPpsStd = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT ) == sizeof( VkVideoEncodeH265SessionParametersAddInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT>::value, "VideoEncodeH265SessionParametersAddInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoEncodeH265SessionParametersAddInfoEXT>
|
|
{
|
|
using Type = VideoEncodeH265SessionParametersAddInfoEXT;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
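
  // Illustrative usage sketch (not part of the generated header): when VULKAN_HPP_DISABLE_ENHANCED_MODE
  // is not defined, the ArrayProxyNoTemporaries constructor derives the VPS/SPS/PPS counts from the
  // containers instead of filling the *StdCount / p*Std pairs by hand. The std::vector names are
  // hypothetical placeholders for parameter sets produced elsewhere.
  //
  //   std::vector<StdVideoH265VideoParameterSet>    vpsList = /* ... */;
  //   std::vector<StdVideoH265SequenceParameterSet> spsList = /* ... */;
  //   std::vector<StdVideoH265PictureParameterSet>  ppsList = /* ... */;
  //   VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT h265AddInfo( vpsList, spsList, ppsList );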
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoEncodeH265SessionParametersCreateInfoEXT
|
|
{
|
|
using NativeType = VkVideoEncodeH265SessionParametersCreateInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionParametersCreateInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersCreateInfoEXT(uint32_t maxVpsStdCount_ = {}, uint32_t maxSpsStdCount_ = {}, uint32_t maxPpsStdCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: maxVpsStdCount( maxVpsStdCount_ ), maxSpsStdCount( maxSpsStdCount_ ), maxPpsStdCount( maxPpsStdCount_ ), pParametersAddInfo( pParametersAddInfo_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersCreateInfoEXT( VideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeH265SessionParametersCreateInfoEXT( VkVideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoEncodeH265SessionParametersCreateInfoEXT( *reinterpret_cast<VideoEncodeH265SessionParametersCreateInfoEXT const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoEncodeH265SessionParametersCreateInfoEXT & operator=( VideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeH265SessionParametersCreateInfoEXT & operator=( VkVideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT & setMaxVpsStdCount( uint32_t maxVpsStdCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxVpsStdCount = maxVpsStdCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT & setMaxSpsStdCount( uint32_t maxSpsStdCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxSpsStdCount = maxSpsStdCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT & setMaxPpsStdCount( uint32_t maxPpsStdCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxPpsStdCount = maxPpsStdCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT & setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pParametersAddInfo = pParametersAddInfo_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoEncodeH265SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoEncodeH265SessionParametersCreateInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoEncodeH265SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoEncodeH265SessionParametersCreateInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, maxVpsStdCount, maxSpsStdCount, maxPpsStdCount, pParametersAddInfo );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoEncodeH265SessionParametersCreateInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( VideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( maxVpsStdCount == rhs.maxVpsStdCount )
|
|
&& ( maxSpsStdCount == rhs.maxSpsStdCount )
|
|
&& ( maxPpsStdCount == rhs.maxPpsStdCount )
|
|
&& ( pParametersAddInfo == rhs.pParametersAddInfo );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersCreateInfoEXT;
|
|
const void * pNext = {};
|
|
uint32_t maxVpsStdCount = {};
|
|
uint32_t maxSpsStdCount = {};
|
|
uint32_t maxPpsStdCount = {};
|
|
const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT * pParametersAddInfo = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT ) == sizeof( VkVideoEncodeH265SessionParametersCreateInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT>::value, "VideoEncodeH265SessionParametersCreateInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoEncodeH265SessionParametersCreateInfoEXT>
|
|
{
|
|
using Type = VideoEncodeH265SessionParametersCreateInfoEXT;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
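
  // Illustrative usage sketch (not part of the generated header; the counts are hypothetical): the
  // create info reserves capacity for parameter sets and can reference an initial batch through
  // pParametersAddInfo (here h265AddInfo from the sketch above). The filled structure is then
  // chained into the generic VideoSessionParametersCreateInfoKHR when creating the session
  // parameters object; see the VK_EXT_video_encode_h265 specification for the exact usage.
  //
  //   VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT h265ParamsCreateInfo;
  //   h265ParamsCreateInfo.setMaxVpsStdCount( 1 )
  //                       .setMaxSpsStdCount( 1 )
  //                       .setMaxPpsStdCount( 1 )
  //                       .setPParametersAddInfo( &h265AddInfo );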
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoEncodeH265VclFrameInfoEXT
|
|
{
|
|
using NativeType = VkVideoEncodeH265VclFrameInfoEXT;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265VclFrameInfoEXT;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoEncodeH265VclFrameInfoEXT(const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT * pReferenceFinalLists_ = {}, uint32_t naluSliceSegmentEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentEXT * pNaluSliceSegmentEntries_ = {}, const StdVideoEncodeH265PictureInfo * pCurrentPictureInfo_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: pReferenceFinalLists( pReferenceFinalLists_ ), naluSliceSegmentEntryCount( naluSliceSegmentEntryCount_ ), pNaluSliceSegmentEntries( pNaluSliceSegmentEntries_ ), pCurrentPictureInfo( pCurrentPictureInfo_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoEncodeH265VclFrameInfoEXT( VideoEncodeH265VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeH265VclFrameInfoEXT( VkVideoEncodeH265VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoEncodeH265VclFrameInfoEXT( *reinterpret_cast<VideoEncodeH265VclFrameInfoEXT const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
VideoEncodeH265VclFrameInfoEXT( const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT * pReferenceFinalLists_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentEXT> const & naluSliceSegmentEntries_, const StdVideoEncodeH265PictureInfo * pCurrentPictureInfo_ = {} )
|
|
: pReferenceFinalLists( pReferenceFinalLists_ ), naluSliceSegmentEntryCount( static_cast<uint32_t>( naluSliceSegmentEntries_.size() ) ), pNaluSliceSegmentEntries( naluSliceSegmentEntries_.data() ), pCurrentPictureInfo( pCurrentPictureInfo_ )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoEncodeH265VclFrameInfoEXT & operator=( VideoEncodeH265VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeH265VclFrameInfoEXT & operator=( VkVideoEncodeH265VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265VclFrameInfoEXT const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT & setPReferenceFinalLists( const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT * pReferenceFinalLists_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pReferenceFinalLists = pReferenceFinalLists_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT & setNaluSliceSegmentEntryCount( uint32_t naluSliceSegmentEntryCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
naluSliceSegmentEntryCount = naluSliceSegmentEntryCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT & setPNaluSliceSegmentEntries( const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentEXT * pNaluSliceSegmentEntries_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNaluSliceSegmentEntries = pNaluSliceSegmentEntries_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
VideoEncodeH265VclFrameInfoEXT & setNaluSliceSegmentEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentEXT> const & naluSliceSegmentEntries_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
naluSliceSegmentEntryCount = static_cast<uint32_t>( naluSliceSegmentEntries_.size() );
|
|
pNaluSliceSegmentEntries = naluSliceSegmentEntries_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT & setPCurrentPictureInfo( const StdVideoEncodeH265PictureInfo * pCurrentPictureInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pCurrentPictureInfo = pCurrentPictureInfo_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoEncodeH265VclFrameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoEncodeH265VclFrameInfoEXT*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoEncodeH265VclFrameInfoEXT &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoEncodeH265VclFrameInfoEXT*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentEXT * const &, const StdVideoEncodeH265PictureInfo * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, pReferenceFinalLists, naluSliceSegmentEntryCount, pNaluSliceSegmentEntries, pCurrentPictureInfo );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoEncodeH265VclFrameInfoEXT const & ) const = default;
|
|
#else
|
|
bool operator==( VideoEncodeH265VclFrameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( pReferenceFinalLists == rhs.pReferenceFinalLists )
|
|
&& ( naluSliceSegmentEntryCount == rhs.naluSliceSegmentEntryCount )
|
|
&& ( pNaluSliceSegmentEntries == rhs.pNaluSliceSegmentEntries )
|
|
&& ( pCurrentPictureInfo == rhs.pCurrentPictureInfo );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoEncodeH265VclFrameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265VclFrameInfoEXT;
|
|
const void * pNext = {};
|
|
const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT * pReferenceFinalLists = {};
|
|
uint32_t naluSliceSegmentEntryCount = {};
|
|
const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentEXT * pNaluSliceSegmentEntries = {};
|
|
const StdVideoEncodeH265PictureInfo * pCurrentPictureInfo = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265VclFrameInfoEXT ) == sizeof( VkVideoEncodeH265VclFrameInfoEXT ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265VclFrameInfoEXT>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265VclFrameInfoEXT>::value, "VideoEncodeH265VclFrameInfoEXT is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoEncodeH265VclFrameInfoEXT>
|
|
{
|
|
using Type = VideoEncodeH265VclFrameInfoEXT;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
|
|
|
|
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoEncodeInfoKHR
|
|
{
|
|
using NativeType = VkVideoEncodeInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoEncodeInfoKHR(VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_ = {}, uint32_t qualityLevel_ = {}, VULKAN_HPP_NAMESPACE::Extent2D codedExtent_ = {}, VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_ = {}, VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR srcPictureResource_ = {}, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_ = {}, uint32_t referenceSlotCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ = {}, uint32_t precedingExternallyEncodedBytes_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), qualityLevel( qualityLevel_ ), codedExtent( codedExtent_ ), dstBitstreamBuffer( dstBitstreamBuffer_ ), dstBitstreamBufferOffset( dstBitstreamBufferOffset_ ), dstBitstreamBufferMaxRange( dstBitstreamBufferMaxRange_ ), srcPictureResource( srcPictureResource_ ), pSetupReferenceSlot( pSetupReferenceSlot_ ), referenceSlotCount( referenceSlotCount_ ), pReferenceSlots( pReferenceSlots_ ), precedingExternallyEncodedBytes( precedingExternallyEncodedBytes_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoEncodeInfoKHR( VideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeInfoKHR( VkVideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoEncodeInfoKHR( *reinterpret_cast<VideoEncodeInfoKHR const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
VideoEncodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_, uint32_t qualityLevel_, VULKAN_HPP_NAMESPACE::Extent2D codedExtent_, VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_, VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_, VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_, VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR srcPictureResource_, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR> const & referenceSlots_, uint32_t precedingExternallyEncodedBytes_ = {} )
|
|
: flags( flags_ ), qualityLevel( qualityLevel_ ), codedExtent( codedExtent_ ), dstBitstreamBuffer( dstBitstreamBuffer_ ), dstBitstreamBufferOffset( dstBitstreamBufferOffset_ ), dstBitstreamBufferMaxRange( dstBitstreamBufferMaxRange_ ), srcPictureResource( srcPictureResource_ ), pSetupReferenceSlot( pSetupReferenceSlot_ ), referenceSlotCount( static_cast<uint32_t>( referenceSlots_.size() ) ), pReferenceSlots( referenceSlots_.data() ), precedingExternallyEncodedBytes( precedingExternallyEncodedBytes_ )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoEncodeInfoKHR & operator=( VideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeInfoKHR & operator=( VkVideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setQualityLevel( uint32_t qualityLevel_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
qualityLevel = qualityLevel_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & codedExtent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
codedExtent = codedExtent_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setDstBitstreamBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstBitstreamBuffer = dstBitstreamBuffer_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setDstBitstreamBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstBitstreamBufferOffset = dstBitstreamBufferOffset_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setDstBitstreamBufferMaxRange( VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstBitstreamBufferMaxRange = dstBitstreamBufferMaxRange_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setSrcPictureResource( VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR const & srcPictureResource_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
srcPictureResource = srcPictureResource_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPSetupReferenceSlot( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pSetupReferenceSlot = pSetupReferenceSlot_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
referenceSlotCount = referenceSlotCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pReferenceSlots = pReferenceSlots_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
VideoEncodeInfoKHR & setReferenceSlots( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR> const & referenceSlots_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
referenceSlotCount = static_cast<uint32_t>( referenceSlots_.size() );
|
|
pReferenceSlots = referenceSlots_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPrecedingExternallyEncodedBytes( uint32_t precedingExternallyEncodedBytes_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
precedingExternallyEncodedBytes = precedingExternallyEncodedBytes_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoEncodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoEncodeInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoEncodeInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoEncodeInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR const &, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, qualityLevel, codedExtent, dstBitstreamBuffer, dstBitstreamBufferOffset, dstBitstreamBufferMaxRange, srcPictureResource, pSetupReferenceSlot, referenceSlotCount, pReferenceSlots, precedingExternallyEncodedBytes );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoEncodeInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( VideoEncodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( qualityLevel == rhs.qualityLevel )
|
|
&& ( codedExtent == rhs.codedExtent )
|
|
&& ( dstBitstreamBuffer == rhs.dstBitstreamBuffer )
|
|
&& ( dstBitstreamBufferOffset == rhs.dstBitstreamBufferOffset )
|
|
&& ( dstBitstreamBufferMaxRange == rhs.dstBitstreamBufferMaxRange )
|
|
&& ( srcPictureResource == rhs.srcPictureResource )
|
|
&& ( pSetupReferenceSlot == rhs.pSetupReferenceSlot )
|
|
&& ( referenceSlotCount == rhs.referenceSlotCount )
|
|
&& ( pReferenceSlots == rhs.pReferenceSlots )
|
|
&& ( precedingExternallyEncodedBytes == rhs.precedingExternallyEncodedBytes );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoEncodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags = {};
|
|
uint32_t qualityLevel = {};
|
|
VULKAN_HPP_NAMESPACE::Extent2D codedExtent = {};
|
|
VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset = {};
|
|
VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange = {};
|
|
VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR srcPictureResource = {};
|
|
const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot = {};
|
|
uint32_t referenceSlotCount = {};
|
|
const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots = {};
|
|
uint32_t precedingExternallyEncodedBytes = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR ) == sizeof( VkVideoEncodeInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR>::value, "VideoEncodeInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoEncodeInfoKHR>
|
|
{
|
|
using Type = VideoEncodeInfoKHR;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
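
  // Illustrative usage sketch (not part of the generated header; all handles and values are
  // hypothetical): the codec-independent encode parameters point at the destination bitstream
  // buffer and the source picture, while codec-specific per-frame information (for example the
  // VideoEncodeH265VclFrameInfoEXT defined above) is supplied through the pNext chain. The filled
  // structure is typically consumed by a command-buffer encode call
  // (CommandBuffer::encodeVideoKHR / vkCmdEncodeVideoKHR).
  //
  //   VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR encodeInfo;
  //   encodeInfo.setPNext( &h265FrameInfo )                      // codec-specific frame info
  //             .setCodedExtent( { 1920, 1080 } )
  //             .setDstBitstreamBuffer( bitstreamBuffer )
  //             .setDstBitstreamBufferOffset( 0 )
  //             .setDstBitstreamBufferMaxRange( bitstreamBufferSize )
  //             .setSrcPictureResource( srcPictureResource )
  //             .setPSetupReferenceSlot( &setupSlot );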
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoEncodeRateControlLayerInfoKHR
|
|
{
|
|
using NativeType = VkVideoEncodeRateControlLayerInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeRateControlLayerInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoEncodeRateControlLayerInfoKHR(uint32_t averageBitrate_ = {}, uint32_t maxBitrate_ = {}, uint32_t frameRateNumerator_ = {}, uint32_t frameRateDenominator_ = {}, uint32_t virtualBufferSizeInMs_ = {}, uint32_t initialVirtualBufferSizeInMs_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: averageBitrate( averageBitrate_ ), maxBitrate( maxBitrate_ ), frameRateNumerator( frameRateNumerator_ ), frameRateDenominator( frameRateDenominator_ ), virtualBufferSizeInMs( virtualBufferSizeInMs_ ), initialVirtualBufferSizeInMs( initialVirtualBufferSizeInMs_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoEncodeRateControlLayerInfoKHR( VideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeRateControlLayerInfoKHR( VkVideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoEncodeRateControlLayerInfoKHR( *reinterpret_cast<VideoEncodeRateControlLayerInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoEncodeRateControlLayerInfoKHR & operator=( VideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEncodeRateControlLayerInfoKHR & operator=( VkVideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setAverageBitrate( uint32_t averageBitrate_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
averageBitrate = averageBitrate_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setMaxBitrate( uint32_t maxBitrate_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxBitrate = maxBitrate_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setFrameRateNumerator( uint32_t frameRateNumerator_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
frameRateNumerator = frameRateNumerator_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setFrameRateDenominator( uint32_t frameRateDenominator_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
frameRateDenominator = frameRateDenominator_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setVirtualBufferSizeInMs( uint32_t virtualBufferSizeInMs_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
virtualBufferSizeInMs = virtualBufferSizeInMs_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setInitialVirtualBufferSizeInMs( uint32_t initialVirtualBufferSizeInMs_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
initialVirtualBufferSizeInMs = initialVirtualBufferSizeInMs_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoEncodeRateControlLayerInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoEncodeRateControlLayerInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoEncodeRateControlLayerInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoEncodeRateControlLayerInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, averageBitrate, maxBitrate, frameRateNumerator, frameRateDenominator, virtualBufferSizeInMs, initialVirtualBufferSizeInMs );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoEncodeRateControlLayerInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( VideoEncodeRateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( averageBitrate == rhs.averageBitrate )
|
|
&& ( maxBitrate == rhs.maxBitrate )
|
|
&& ( frameRateNumerator == rhs.frameRateNumerator )
|
|
&& ( frameRateDenominator == rhs.frameRateDenominator )
|
|
&& ( virtualBufferSizeInMs == rhs.virtualBufferSizeInMs )
|
|
&& ( initialVirtualBufferSizeInMs == rhs.initialVirtualBufferSizeInMs );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoEncodeRateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeRateControlLayerInfoKHR;
|
|
const void * pNext = {};
|
|
uint32_t averageBitrate = {};
|
|
uint32_t maxBitrate = {};
|
|
uint32_t frameRateNumerator = {};
|
|
uint32_t frameRateDenominator = {};
|
|
uint32_t virtualBufferSizeInMs = {};
|
|
uint32_t initialVirtualBufferSizeInMs = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR ) == sizeof( VkVideoEncodeRateControlLayerInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR>::value, "VideoEncodeRateControlLayerInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoEncodeRateControlLayerInfoKHR>
|
|
{
|
|
using Type = VideoEncodeRateControlLayerInfoKHR;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeRateControlInfoKHR
  {
    using NativeType = VkVideoEncodeRateControlInfoKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeRateControlInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeRateControlInfoKHR(VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ = VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eNone, uint8_t layerCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayerConfigs_ = {}) VULKAN_HPP_NOEXCEPT
    : flags( flags_ ), rateControlMode( rateControlMode_ ), layerCount( layerCount_ ), pLayerConfigs( pLayerConfigs_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoEncodeRateControlInfoKHR( VideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeRateControlInfoKHR( VkVideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeRateControlInfoKHR( *reinterpret_cast<VideoEncodeRateControlInfoKHR const *>( &rhs ) )
    {}

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeRateControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_, VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR> const & layerConfigs_ )
    : flags( flags_ ), rateControlMode( rateControlMode_ ), layerCount( static_cast<uint8_t>( layerConfigs_.size() ) ), pLayerConfigs( layerConfigs_.data() )
    {}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoEncodeRateControlInfoKHR & operator=( VideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeRateControlInfoKHR & operator=( VkVideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setRateControlMode( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ ) VULKAN_HPP_NOEXCEPT
    {
      rateControlMode = rateControlMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setLayerCount( uint8_t layerCount_ ) VULKAN_HPP_NOEXCEPT
    {
      layerCount = layerCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setPLayerConfigs( const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayerConfigs_ ) VULKAN_HPP_NOEXCEPT
    {
      pLayerConfigs = pLayerConfigs_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeRateControlInfoKHR & setLayerConfigs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR> const & layerConfigs_ ) VULKAN_HPP_NOEXCEPT
    {
      layerCount = static_cast<uint8_t>( layerConfigs_.size() );
      pLayerConfigs = layerConfigs_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkVideoEncodeRateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeRateControlInfoKHR*>( this );
    }

    explicit operator VkVideoEncodeRateControlInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeRateControlInfoKHR*>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR const &, VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR const &, uint8_t const &, const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * const &>
#endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, rateControlMode, layerCount, pLayerConfigs );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoEncodeRateControlInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( VideoEncodeRateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( rateControlMode == rhs.rateControlMode )
|
|
&& ( layerCount == rhs.layerCount )
|
|
&& ( pLayerConfigs == rhs.pLayerConfigs );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoEncodeRateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeRateControlInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags = {};
|
|
VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode = VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eNone;
|
|
uint8_t layerCount = {};
|
|
const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayerConfigs = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR ) == sizeof( VkVideoEncodeRateControlInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR>::value, "VideoEncodeRateControlInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoEncodeRateControlInfoKHR>
|
|
{
|
|
using Type = VideoEncodeRateControlInfoKHR;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
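
  // Illustrative sketch (not generated code, assuming the default vk namespace): filling
  // VideoEncodeRateControlInfoKHR from per-layer configs via the ArrayProxy-based constructor
  // defined above. The eCbr mode bit and chaining into a VideoCodingControlInfoKHR pNext chain are
  // assumptions based on the provisional encode extension and may differ between header revisions;
  // layerInfo stands in for a previously filled VideoEncodeRateControlLayerInfoKHR.
  //
  //   std::array<vk::VideoEncodeRateControlLayerInfoKHR, 1> layers = { layerInfo };
  //   vk::VideoEncodeRateControlInfoKHR rateControlInfo( {},                                              // flags
  //                                                      vk::VideoEncodeRateControlModeFlagBitsKHR::eCbr,
  //                                                      layers );  // sets layerCount / pLayerConfigs
  //   vk::VideoCodingControlInfoKHR codingControl;
  //   codingControl.setPNext( &rateControlInfo );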
|
|
|
|
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoEndCodingInfoKHR
|
|
{
|
|
using NativeType = VkVideoEndCodingInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEndCodingInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoEndCodingInfoKHR(VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoEndCodingInfoKHR( VideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEndCodingInfoKHR( VkVideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoEndCodingInfoKHR( *reinterpret_cast<VideoEndCodingInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoEndCodingInfoKHR & operator=( VideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoEndCodingInfoKHR & operator=( VkVideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEndCodingInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoEndCodingInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoEndCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoEndCodingInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoEndCodingInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoEndCodingInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoEndCodingInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( VideoEndCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoEndCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEndCodingInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR ) == sizeof( VkVideoEndCodingInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR>::value, "VideoEndCodingInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoEndCodingInfoKHR>
|
|
{
|
|
using Type = VideoEndCodingInfoKHR;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
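
  // Illustrative sketch (not generated code): VideoEndCodingInfoKHR currently carries only a flags
  // field and is passed when closing a video coding scope. The endVideoCodingKHR wrapper named here
  // is an assumption about the provisional VK_KHR_video_queue command set.
  //
  //   commandBuffer.endVideoCodingKHR( vk::VideoEndCodingInfoKHR{} );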
|
|
|
|
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoFormatPropertiesKHR
|
|
{
|
|
using NativeType = VkVideoFormatPropertiesKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoFormatPropertiesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoFormatPropertiesKHR(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined) VULKAN_HPP_NOEXCEPT
|
|
: format( format_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoFormatPropertiesKHR( VideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoFormatPropertiesKHR( VkVideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoFormatPropertiesKHR( *reinterpret_cast<VideoFormatPropertiesKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoFormatPropertiesKHR & operator=( VideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoFormatPropertiesKHR & operator=( VkVideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
|
|
explicit operator VkVideoFormatPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoFormatPropertiesKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoFormatPropertiesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoFormatPropertiesKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Format const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, format );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoFormatPropertiesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( VideoFormatPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( format == rhs.format );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoFormatPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoFormatPropertiesKHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR ) == sizeof( VkVideoFormatPropertiesKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>::value, "VideoFormatPropertiesKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoFormatPropertiesKHR>
|
|
{
|
|
using Type = VideoFormatPropertiesKHR;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
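
  // Illustrative sketch (not generated code): VideoFormatPropertiesKHR is an output structure,
  // typically returned from a physical-device video format query. The getVideoFormatPropertiesKHR
  // wrapper, the PhysicalDeviceVideoFormatInfoKHR input struct, and the eVideoDecodeDpbKHR usage bit
  // named below are assumptions about the provisional VK_KHR_video_queue API.
  //
  //   vk::PhysicalDeviceVideoFormatInfoKHR formatInfo( vk::ImageUsageFlagBits::eVideoDecodeDpbKHR );
  //   std::vector<vk::VideoFormatPropertiesKHR> formats =
  //     physicalDevice.getVideoFormatPropertiesKHR( formatInfo );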
|
|
|
|
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoGetMemoryPropertiesKHR
|
|
{
|
|
using NativeType = VkVideoGetMemoryPropertiesKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoGetMemoryPropertiesKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoGetMemoryPropertiesKHR(uint32_t memoryBindIndex_ = {}, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: memoryBindIndex( memoryBindIndex_ ), pMemoryRequirements( pMemoryRequirements_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoGetMemoryPropertiesKHR( VideoGetMemoryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoGetMemoryPropertiesKHR( VkVideoGetMemoryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoGetMemoryPropertiesKHR( *reinterpret_cast<VideoGetMemoryPropertiesKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoGetMemoryPropertiesKHR & operator=( VideoGetMemoryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoGetMemoryPropertiesKHR & operator=( VkVideoGetMemoryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoGetMemoryPropertiesKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoGetMemoryPropertiesKHR & setMemoryBindIndex( uint32_t memoryBindIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
memoryBindIndex = memoryBindIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoGetMemoryPropertiesKHR & setPMemoryRequirements( VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pMemoryRequirements = pMemoryRequirements_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoGetMemoryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoGetMemoryPropertiesKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoGetMemoryPropertiesKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoGetMemoryPropertiesKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, memoryBindIndex, pMemoryRequirements );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoGetMemoryPropertiesKHR const & ) const = default;
|
|
#else
|
|
bool operator==( VideoGetMemoryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( memoryBindIndex == rhs.memoryBindIndex )
|
|
&& ( pMemoryRequirements == rhs.pMemoryRequirements );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoGetMemoryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoGetMemoryPropertiesKHR;
|
|
const void * pNext = {};
|
|
uint32_t memoryBindIndex = {};
|
|
VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR ) == sizeof( VkVideoGetMemoryPropertiesKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR>::value, "VideoGetMemoryPropertiesKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoGetMemoryPropertiesKHR>
|
|
{
|
|
using Type = VideoGetMemoryPropertiesKHR;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
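
  // Illustrative sketch (not generated code): each VideoGetMemoryPropertiesKHR entry pairs a memory
  // bind index with a caller-provided MemoryRequirements2 that the implementation fills in. The
  // getVideoSessionMemoryRequirementsKHR wrapper named here is an assumption about the provisional
  // VK_KHR_video_queue API; the exact return shape may differ.
  //
  //   std::vector<vk::VideoGetMemoryPropertiesKHR> memProps =
  //     device.getVideoSessionMemoryRequirementsKHR( videoSession );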
|
|
|
|
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoQueueFamilyProperties2KHR
|
|
{
|
|
using NativeType = VkVideoQueueFamilyProperties2KHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoQueueFamilyProperties2KHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoQueueFamilyProperties2KHR(VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: videoCodecOperations( videoCodecOperations_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoQueueFamilyProperties2KHR( VideoQueueFamilyProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoQueueFamilyProperties2KHR( VkVideoQueueFamilyProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoQueueFamilyProperties2KHR( *reinterpret_cast<VideoQueueFamilyProperties2KHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoQueueFamilyProperties2KHR & operator=( VideoQueueFamilyProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoQueueFamilyProperties2KHR & operator=( VkVideoQueueFamilyProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoQueueFamilyProperties2KHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoQueueFamilyProperties2KHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoQueueFamilyProperties2KHR & setVideoCodecOperations( VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
videoCodecOperations = videoCodecOperations_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoQueueFamilyProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoQueueFamilyProperties2KHR*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoQueueFamilyProperties2KHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoQueueFamilyProperties2KHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, videoCodecOperations );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoQueueFamilyProperties2KHR const & ) const = default;
|
|
#else
|
|
bool operator==( VideoQueueFamilyProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( videoCodecOperations == rhs.videoCodecOperations );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoQueueFamilyProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoQueueFamilyProperties2KHR;
|
|
void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoQueueFamilyProperties2KHR ) == sizeof( VkVideoQueueFamilyProperties2KHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoQueueFamilyProperties2KHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoQueueFamilyProperties2KHR>::value, "VideoQueueFamilyProperties2KHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoQueueFamilyProperties2KHR>
|
|
{
|
|
using Type = VideoQueueFamilyProperties2KHR;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
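
  // Illustrative sketch (not generated code): VideoQueueFamilyProperties2KHR is an output structure
  // that extends QueueFamilyProperties2 through its pNext chain, so video-capable queue families can
  // be identified from videoCodecOperations. The structure-chain overload used below is assumed to
  // be available in enhanced mode.
  //
  //   auto chains = physicalDevice.getQueueFamilyProperties2<vk::QueueFamilyProperties2,
  //                                                          vk::VideoQueueFamilyProperties2KHR>();
  //   for ( auto const & chain : chains )
  //   {
  //     auto ops = chain.get<vk::VideoQueueFamilyProperties2KHR>().videoCodecOperations;
  //   }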
|
|
|
|
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoSessionCreateInfoKHR
|
|
{
|
|
using NativeType = VkVideoSessionCreateInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionCreateInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoSessionCreateInfoKHR(uint32_t queueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags_ = {}, const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile_ = {}, VULKAN_HPP_NAMESPACE::Format pictureFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent_ = {}, VULKAN_HPP_NAMESPACE::Format referencePicturesFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint32_t maxReferencePicturesSlotsCount_ = {}, uint32_t maxReferencePicturesActiveCount_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: queueFamilyIndex( queueFamilyIndex_ ), flags( flags_ ), pVideoProfile( pVideoProfile_ ), pictureFormat( pictureFormat_ ), maxCodedExtent( maxCodedExtent_ ), referencePicturesFormat( referencePicturesFormat_ ), maxReferencePicturesSlotsCount( maxReferencePicturesSlotsCount_ ), maxReferencePicturesActiveCount( maxReferencePicturesActiveCount_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoSessionCreateInfoKHR( VideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoSessionCreateInfoKHR( VkVideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoSessionCreateInfoKHR( *reinterpret_cast<VideoSessionCreateInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoSessionCreateInfoKHR & operator=( VideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoSessionCreateInfoKHR & operator=( VkVideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
queueFamilyIndex = queueFamilyIndex_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setPVideoProfile( const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pVideoProfile = pVideoProfile_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setPictureFormat( VULKAN_HPP_NAMESPACE::Format pictureFormat_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pictureFormat = pictureFormat_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setMaxCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & maxCodedExtent_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxCodedExtent = maxCodedExtent_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setReferencePicturesFormat( VULKAN_HPP_NAMESPACE::Format referencePicturesFormat_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
referencePicturesFormat = referencePicturesFormat_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setMaxReferencePicturesSlotsCount( uint32_t maxReferencePicturesSlotsCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxReferencePicturesSlotsCount = maxReferencePicturesSlotsCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setMaxReferencePicturesActiveCount( uint32_t maxReferencePicturesActiveCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
maxReferencePicturesActiveCount = maxReferencePicturesActiveCount_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoSessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoSessionCreateInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoSessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoSessionCreateInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR const &, const VULKAN_HPP_NAMESPACE::VideoProfileKHR * const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Format const &, uint32_t const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, queueFamilyIndex, flags, pVideoProfile, pictureFormat, maxCodedExtent, referencePicturesFormat, maxReferencePicturesSlotsCount, maxReferencePicturesActiveCount );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoSessionCreateInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( VideoSessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( queueFamilyIndex == rhs.queueFamilyIndex )
|
|
&& ( flags == rhs.flags )
|
|
&& ( pVideoProfile == rhs.pVideoProfile )
|
|
&& ( pictureFormat == rhs.pictureFormat )
|
|
&& ( maxCodedExtent == rhs.maxCodedExtent )
|
|
&& ( referencePicturesFormat == rhs.referencePicturesFormat )
|
|
&& ( maxReferencePicturesSlotsCount == rhs.maxReferencePicturesSlotsCount )
|
|
&& ( maxReferencePicturesActiveCount == rhs.maxReferencePicturesActiveCount );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoSessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionCreateInfoKHR;
|
|
const void * pNext = {};
|
|
uint32_t queueFamilyIndex = {};
|
|
VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags = {};
|
|
const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile = {};
|
|
VULKAN_HPP_NAMESPACE::Format pictureFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent = {};
|
|
VULKAN_HPP_NAMESPACE::Format referencePicturesFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
|
|
uint32_t maxReferencePicturesSlotsCount = {};
|
|
uint32_t maxReferencePicturesActiveCount = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR ) == sizeof( VkVideoSessionCreateInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR>::value, "VideoSessionCreateInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoSessionCreateInfoKHR>
|
|
{
|
|
using Type = VideoSessionCreateInfoKHR;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
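
  // Illustrative sketch (not generated code): creating a video session from the structure above. The
  // createVideoSessionKHR wrapper is an assumption about the provisional VK_KHR_video_queue API, and
  // videoProfile, pictureFormat, maxCodedExtent and the count values stand in for data obtained from
  // the corresponding capability queries.
  //
  //   vk::VideoSessionCreateInfoKHR sessionCreateInfo( queueFamilyIndex,
  //                                                    {},                       // flags
  //                                                    &videoProfile,
  //                                                    pictureFormat,
  //                                                    maxCodedExtent,
  //                                                    referencePicturesFormat,
  //                                                    maxDpbSlots,
  //                                                    maxActiveReferences );
  //   vk::VideoSessionKHR videoSession = device.createVideoSessionKHR( sessionCreateInfo );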
|
|
|
|
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoSessionParametersCreateInfoKHR
|
|
{
|
|
using NativeType = VkVideoSessionParametersCreateInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionParametersCreateInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoSessionParametersCreateInfoKHR(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate_ = {}, VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: videoSessionParametersTemplate( videoSessionParametersTemplate_ ), videoSession( videoSession_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoSessionParametersCreateInfoKHR( VideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoSessionParametersCreateInfoKHR( VkVideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoSessionParametersCreateInfoKHR( *reinterpret_cast<VideoSessionParametersCreateInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoSessionParametersCreateInfoKHR & operator=( VideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoSessionParametersCreateInfoKHR & operator=( VkVideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & setVideoSessionParametersTemplate( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
videoSessionParametersTemplate = videoSessionParametersTemplate_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & setVideoSession( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
videoSession = videoSession_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoSessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoSessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoSessionParametersCreateInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR const &, VULKAN_HPP_NAMESPACE::VideoSessionKHR const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, videoSessionParametersTemplate, videoSession );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoSessionParametersCreateInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( VideoSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( videoSessionParametersTemplate == rhs.videoSessionParametersTemplate )
|
|
&& ( videoSession == rhs.videoSession );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionParametersCreateInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate = {};
|
|
VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR ) == sizeof( VkVideoSessionParametersCreateInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR>::value, "VideoSessionParametersCreateInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoSessionParametersCreateInfoKHR>
|
|
{
|
|
using Type = VideoSessionParametersCreateInfoKHR;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
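
  // Illustrative sketch (not generated code): session parameters are created against an existing
  // video session, optionally seeded from a template object. The createVideoSessionParametersKHR
  // wrapper (and the codec-specific pNext structures it usually requires) are assumptions about the
  // provisional video extensions.
  //
  //   vk::VideoSessionParametersCreateInfoKHR parametersCreateInfo( {},             // no template
  //                                                                 videoSession );
  //   vk::VideoSessionParametersKHR parameters =
  //     device.createVideoSessionParametersKHR( parametersCreateInfo );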
|
|
|
|
#if defined( VK_ENABLE_BETA_EXTENSIONS )
|
|
struct VideoSessionParametersUpdateInfoKHR
|
|
{
|
|
using NativeType = VkVideoSessionParametersUpdateInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionParametersUpdateInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR(uint32_t updateSequenceCount_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: updateSequenceCount( updateSequenceCount_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR( VideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoSessionParametersUpdateInfoKHR( VkVideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: VideoSessionParametersUpdateInfoKHR( *reinterpret_cast<VideoSessionParametersUpdateInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
VideoSessionParametersUpdateInfoKHR & operator=( VideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
VideoSessionParametersUpdateInfoKHR & operator=( VkVideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersUpdateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersUpdateInfoKHR & setUpdateSequenceCount( uint32_t updateSequenceCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
updateSequenceCount = updateSequenceCount_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkVideoSessionParametersUpdateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkVideoSessionParametersUpdateInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkVideoSessionParametersUpdateInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, updateSequenceCount );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( VideoSessionParametersUpdateInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( VideoSessionParametersUpdateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( updateSequenceCount == rhs.updateSequenceCount );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( VideoSessionParametersUpdateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionParametersUpdateInfoKHR;
|
|
const void * pNext = {};
|
|
uint32_t updateSequenceCount = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR ) == sizeof( VkVideoSessionParametersUpdateInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR>::value, "VideoSessionParametersUpdateInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eVideoSessionParametersUpdateInfoKHR>
|
|
{
|
|
using Type = VideoSessionParametersUpdateInfoKHR;
|
|
};
|
|
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
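
  // Illustrative sketch (not generated code): updateSequenceCount versions successive parameter
  // updates. Both the expectation that it increases by one per update and the
  // updateVideoSessionParametersKHR wrapper used below are assumptions about the provisional
  // VK_KHR_video_queue API.
  //
  //   vk::VideoSessionParametersUpdateInfoKHR updateInfo( previousUpdateCount + 1 );
  //   device.updateVideoSessionParametersKHR( parameters, updateInfo );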
|
|
|
|
#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
|
|
struct WaylandSurfaceCreateInfoKHR
|
|
{
|
|
using NativeType = VkWaylandSurfaceCreateInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWaylandSurfaceCreateInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ = {}, struct wl_display * display_ = {}, struct wl_surface * surface_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), display( display_ ), surface( surface_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: WaylandSurfaceCreateInfoKHR( *reinterpret_cast<WaylandSurfaceCreateInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
WaylandSurfaceCreateInfoKHR & operator=( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
WaylandSurfaceCreateInfoKHR & operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setDisplay( struct wl_display * display_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
display = display_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setSurface( struct wl_surface * surface_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
surface = surface_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkWaylandSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkWaylandSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkWaylandSurfaceCreateInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR const &, struct wl_display * const &, struct wl_surface * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, display, surface );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( WaylandSurfaceCreateInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( WaylandSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( display == rhs.display )
|
|
&& ( surface == rhs.surface );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( WaylandSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWaylandSurfaceCreateInfoKHR;
|
|
const void * pNext = {};
|
|
VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags = {};
|
|
struct wl_display * display = {};
|
|
struct wl_surface * surface = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR>::value, "WaylandSurfaceCreateInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eWaylandSurfaceCreateInfoKHR>
|
|
{
|
|
using Type = WaylandSurfaceCreateInfoKHR;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
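
  // Illustrative sketch (not generated code): creating a Wayland surface from an existing
  // wl_display / wl_surface pair; display and wlSurface are placeholders for handles owned by the
  // application's windowing code.
  //
  //   vk::WaylandSurfaceCreateInfoKHR surfaceCreateInfo( {}, display, wlSurface );
  //   vk::SurfaceKHR surface = instance.createWaylandSurfaceKHR( surfaceCreateInfo );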
|
|
|
|
#if defined( VK_USE_PLATFORM_WIN32_KHR )
|
|
struct Win32KeyedMutexAcquireReleaseInfoKHR
|
|
{
|
|
using NativeType = VkWin32KeyedMutexAcquireReleaseInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR(uint32_t acquireCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ = {}, const uint64_t * pAcquireKeys_ = {}, const uint32_t * pAcquireTimeouts_ = {}, uint32_t releaseCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ = {}, const uint64_t * pReleaseKeys_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: acquireCount( acquireCount_ ), pAcquireSyncs( pAcquireSyncs_ ), pAcquireKeys( pAcquireKeys_ ), pAcquireTimeouts( pAcquireTimeouts_ ), releaseCount( releaseCount_ ), pReleaseSyncs( pReleaseSyncs_ ), pReleaseKeys( pReleaseKeys_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
Win32KeyedMutexAcquireReleaseInfoKHR( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: Win32KeyedMutexAcquireReleaseInfoKHR( *reinterpret_cast<Win32KeyedMutexAcquireReleaseInfoKHR const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
Win32KeyedMutexAcquireReleaseInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeouts_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ = {} )
|
|
: acquireCount( static_cast<uint32_t>( acquireSyncs_.size() ) ), pAcquireSyncs( acquireSyncs_.data() ), pAcquireKeys( acquireKeys_.data() ), pAcquireTimeouts( acquireTimeouts_.data() ), releaseCount( static_cast<uint32_t>( releaseSyncs_.size() ) ), pReleaseSyncs( releaseSyncs_.data() ), pReleaseKeys( releaseKeys_.data() )
|
|
{
|
|
#ifdef VULKAN_HPP_NO_EXCEPTIONS
|
|
VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() );
|
|
VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeouts_.size() );
|
|
VULKAN_HPP_ASSERT( acquireKeys_.size() == acquireTimeouts_.size() );
|
|
#else
|
|
if ( acquireSyncs_.size() != acquireKeys_.size() )
|
|
{
|
|
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireKeys_.size()" );
|
|
}
|
|
if ( acquireSyncs_.size() != acquireTimeouts_.size() )
|
|
{
|
|
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireTimeouts_.size()" );
|
|
}
|
|
if ( acquireKeys_.size() != acquireTimeouts_.size() )
|
|
{
|
|
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireKeys_.size() != acquireTimeouts_.size()" );
|
|
}
|
|
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
|
|
|
|
#ifdef VULKAN_HPP_NO_EXCEPTIONS
|
|
VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() );
|
|
#else
|
|
if ( releaseSyncs_.size() != releaseKeys_.size() )
|
|
{
|
|
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: releaseSyncs_.size() != releaseKeys_.size()" );
|
|
}
|
|
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
Win32KeyedMutexAcquireReleaseInfoKHR & operator=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
Win32KeyedMutexAcquireReleaseInfoKHR & operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
acquireCount = acquireCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAcquireSyncs = pAcquireSyncs_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
acquireCount = static_cast<uint32_t>( acquireSyncs_.size() );
|
|
pAcquireSyncs = acquireSyncs_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireKeys( const uint64_t * pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAcquireKeys = pAcquireKeys_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
acquireCount = static_cast<uint32_t>( acquireKeys_.size() );
|
|
pAcquireKeys = acquireKeys_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireTimeouts( const uint32_t * pAcquireTimeouts_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAcquireTimeouts = pAcquireTimeouts_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireTimeouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeouts_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
acquireCount = static_cast<uint32_t>( acquireTimeouts_.size() );
|
|
pAcquireTimeouts = acquireTimeouts_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
releaseCount = releaseCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pReleaseSyncs = pReleaseSyncs_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
releaseCount = static_cast<uint32_t>( releaseSyncs_.size() );
|
|
pReleaseSyncs = releaseSyncs_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseKeys( const uint64_t * pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pReleaseKeys = pReleaseKeys_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
releaseCount = static_cast<uint32_t>( releaseKeys_.size() );
|
|
pReleaseKeys = releaseKeys_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkWin32KeyedMutexAcquireReleaseInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkWin32KeyedMutexAcquireReleaseInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DeviceMemory * const &, const uint64_t * const &, const uint32_t * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DeviceMemory * const &, const uint64_t * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, acquireCount, pAcquireSyncs, pAcquireKeys, pAcquireTimeouts, releaseCount, pReleaseSyncs, pReleaseKeys );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( Win32KeyedMutexAcquireReleaseInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( acquireCount == rhs.acquireCount )
|
|
&& ( pAcquireSyncs == rhs.pAcquireSyncs )
|
|
&& ( pAcquireKeys == rhs.pAcquireKeys )
|
|
&& ( pAcquireTimeouts == rhs.pAcquireTimeouts )
|
|
&& ( releaseCount == rhs.releaseCount )
|
|
&& ( pReleaseSyncs == rhs.pReleaseSyncs )
|
|
&& ( pReleaseKeys == rhs.pReleaseKeys );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
public:
|
|
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
|
|
const void * pNext = {};
|
|
uint32_t acquireCount = {};
|
|
const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs = {};
|
|
const uint64_t * pAcquireKeys = {};
|
|
const uint32_t * pAcquireTimeouts = {};
|
|
uint32_t releaseCount = {};
|
|
const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs = {};
|
|
const uint64_t * pReleaseKeys = {};
|
|
|
|
};
|
|
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ), "struct and wrapper have different size!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR>::value, "struct wrapper is not a standard layout!" );
|
|
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR>::value, "Win32KeyedMutexAcquireReleaseInfoKHR is not nothrow_move_constructible!" );
|
|
|
|
template <>
|
|
struct CppType<StructureType, StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR>
|
|
{
|
|
using Type = Win32KeyedMutexAcquireReleaseInfoKHR;
|
|
};
|
|
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
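
  // Illustrative sketch (not generated code): the ArrayProxy constructor above enforces matching
  // acquire/release array sizes (throwing LogicError, or asserting when exceptions are disabled).
  // Per VK_KHR_win32_keyed_mutex the structure is consumed through the pNext chain of a queue
  // submission; the container variables below are placeholders for application data.
  //
  //   vk::Win32KeyedMutexAcquireReleaseInfoKHR keyedMutexInfo( acquireSyncs, acquireKeys, acquireTimeouts,
  //                                                            releaseSyncs, releaseKeys );
  //   vk::SubmitInfo submitInfo;
  //   submitInfo.setPNext( &keyedMutexInfo );
  //   queue.submit( submitInfo, fence );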
|
|
|
|
#if defined( VK_USE_PLATFORM_WIN32_KHR )
|
|
struct Win32KeyedMutexAcquireReleaseInfoNV
|
|
{
|
|
using NativeType = VkWin32KeyedMutexAcquireReleaseInfoNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV(uint32_t acquireCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ = {}, const uint64_t * pAcquireKeys_ = {}, const uint32_t * pAcquireTimeoutMilliseconds_ = {}, uint32_t releaseCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ = {}, const uint64_t * pReleaseKeys_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: acquireCount( acquireCount_ ), pAcquireSyncs( pAcquireSyncs_ ), pAcquireKeys( pAcquireKeys_ ), pAcquireTimeoutMilliseconds( pAcquireTimeoutMilliseconds_ ), releaseCount( releaseCount_ ), pReleaseSyncs( pReleaseSyncs_ ), pReleaseKeys( pReleaseKeys_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: Win32KeyedMutexAcquireReleaseInfoNV( *reinterpret_cast<Win32KeyedMutexAcquireReleaseInfoNV const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
Win32KeyedMutexAcquireReleaseInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeoutMilliseconds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ = {} )
|
|
: acquireCount( static_cast<uint32_t>( acquireSyncs_.size() ) ), pAcquireSyncs( acquireSyncs_.data() ), pAcquireKeys( acquireKeys_.data() ), pAcquireTimeoutMilliseconds( acquireTimeoutMilliseconds_.data() ), releaseCount( static_cast<uint32_t>( releaseSyncs_.size() ) ), pReleaseSyncs( releaseSyncs_.data() ), pReleaseKeys( releaseKeys_.data() )
|
|
{
|
|
#ifdef VULKAN_HPP_NO_EXCEPTIONS
|
|
VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() );
|
|
VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeoutMilliseconds_.size() );
|
|
VULKAN_HPP_ASSERT( acquireKeys_.size() == acquireTimeoutMilliseconds_.size() );
|
|
#else
|
|
if ( acquireSyncs_.size() != acquireKeys_.size() )
|
|
{
|
|
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireKeys_.size()" );
|
|
}
|
|
if ( acquireSyncs_.size() != acquireTimeoutMilliseconds_.size() )
|
|
{
|
|
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireTimeoutMilliseconds_.size()" );
|
|
}
|
|
if ( acquireKeys_.size() != acquireTimeoutMilliseconds_.size() )
|
|
{
|
|
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireKeys_.size() != acquireTimeoutMilliseconds_.size()" );
|
|
}
|
|
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
|
|
|
|
#ifdef VULKAN_HPP_NO_EXCEPTIONS
|
|
VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() );
|
|
#else
|
|
if ( releaseSyncs_.size() != releaseKeys_.size() )
|
|
{
|
|
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: releaseSyncs_.size() != releaseKeys_.size()" );
|
|
}
|
|
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
Win32KeyedMutexAcquireReleaseInfoNV & operator=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
Win32KeyedMutexAcquireReleaseInfoNV & operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
acquireCount = acquireCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAcquireSyncs = pAcquireSyncs_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
Win32KeyedMutexAcquireReleaseInfoNV & setAcquireSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
acquireCount = static_cast<uint32_t>( acquireSyncs_.size() );
|
|
pAcquireSyncs = acquireSyncs_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireKeys( const uint64_t * pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAcquireKeys = pAcquireKeys_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
Win32KeyedMutexAcquireReleaseInfoNV & setAcquireKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
acquireCount = static_cast<uint32_t>( acquireKeys_.size() );
|
|
pAcquireKeys = acquireKeys_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireTimeoutMilliseconds( const uint32_t * pAcquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
Win32KeyedMutexAcquireReleaseInfoNV & setAcquireTimeoutMilliseconds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
acquireCount = static_cast<uint32_t>( acquireTimeoutMilliseconds_.size() );
|
|
pAcquireTimeoutMilliseconds = acquireTimeoutMilliseconds_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
releaseCount = releaseCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pReleaseSyncs = pReleaseSyncs_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
Win32KeyedMutexAcquireReleaseInfoNV & setReleaseSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
releaseCount = static_cast<uint32_t>( releaseSyncs_.size() );
|
|
pReleaseSyncs = releaseSyncs_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseKeys( const uint64_t * pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pReleaseKeys = pReleaseKeys_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
Win32KeyedMutexAcquireReleaseInfoNV & setReleaseKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
releaseCount = static_cast<uint32_t>( releaseKeys_.size() );
|
|
pReleaseKeys = releaseKeys_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkWin32KeyedMutexAcquireReleaseInfoNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoNV*>( this );
|
|
}
|
|
|
|
explicit operator VkWin32KeyedMutexAcquireReleaseInfoNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DeviceMemory * const &, const uint64_t * const &, const uint32_t * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DeviceMemory * const &, const uint64_t * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, acquireCount, pAcquireSyncs, pAcquireKeys, pAcquireTimeoutMilliseconds, releaseCount, pReleaseSyncs, pReleaseKeys );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( Win32KeyedMutexAcquireReleaseInfoNV const & ) const = default;
|
|
#else
|
|
bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( acquireCount == rhs.acquireCount )
|
|
&& ( pAcquireSyncs == rhs.pAcquireSyncs )
|
|
&& ( pAcquireKeys == rhs.pAcquireKeys )
|
|
&& ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds )
|
|
&& ( releaseCount == rhs.releaseCount )
|
|
&& ( pReleaseSyncs == rhs.pReleaseSyncs )
|
|
&& ( pReleaseKeys == rhs.pReleaseKeys );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
    const void * pNext = {};
    uint32_t acquireCount = {};
    const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs = {};
    const uint64_t * pAcquireKeys = {};
    const uint32_t * pAcquireTimeoutMilliseconds = {};
    uint32_t releaseCount = {};
    const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs = {};
    const uint64_t * pReleaseKeys = {};

  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV>::value, "Win32KeyedMutexAcquireReleaseInfoNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eWin32KeyedMutexAcquireReleaseInfoNV>
  {
    using Type = Win32KeyedMutexAcquireReleaseInfoNV;
  };
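
  // Illustrative usage sketch, comment only -- not part of the generated API. The ArrayProxy
  // constructor above fills acquireCount / releaseCount from the array sizes (and checks that the
  // acquire arrays have matching lengths); the memory handle, keys, timeout, queue and fence are
  // assumptions for the example (default `vk` namespace assumed):
  //
  //   std::array<vk::DeviceMemory, 1> syncs       = { importedMemory };
  //   std::array<uint64_t, 1>         acquireKeys = { 0 }, releaseKeys = { 1 };
  //   std::array<uint32_t, 1>         timeoutsMs  = { 1000 };
  //
  //   vk::Win32KeyedMutexAcquireReleaseInfoNV keyedMutexInfo( syncs, acquireKeys, timeoutsMs, syncs, releaseKeys );
  //
  //   vk::SubmitInfo submitInfo;                // fill in command buffers / semaphores as usual
  //   submitInfo.pNext = &keyedMutexInfo;
  //   queue.submit( submitInfo, fence );
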
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
|
|
|
|
#if defined( VK_USE_PLATFORM_WIN32_KHR )
|
|
struct Win32SurfaceCreateInfoKHR
|
|
{
|
|
using NativeType = VkWin32SurfaceCreateInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32SurfaceCreateInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ = {}, HINSTANCE hinstance_ = {}, HWND hwnd_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), hinstance( hinstance_ ), hwnd( hwnd_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: Win32SurfaceCreateInfoKHR( *reinterpret_cast<Win32SurfaceCreateInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
Win32SurfaceCreateInfoKHR & operator=( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
Win32SurfaceCreateInfoKHR & operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setHinstance( HINSTANCE hinstance_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
hinstance = hinstance_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setHwnd( HWND hwnd_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
hwnd = hwnd_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkWin32SurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkWin32SurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkWin32SurfaceCreateInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR const &, HINSTANCE const &, HWND const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, hinstance, hwnd );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( Win32SurfaceCreateInfoKHR const & ) const = default;
|
|
#else
|
|
bool operator==( Win32SurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( hinstance == rhs.hinstance )
|
|
&& ( hwnd == rhs.hwnd );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( Win32SurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32SurfaceCreateInfoKHR;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags = {};
    HINSTANCE hinstance = {};
    HWND hwnd = {};

  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR>::value, "Win32SurfaceCreateInfoKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eWin32SurfaceCreateInfoKHR>
  {
    using Type = Win32SurfaceCreateInfoKHR;
  };
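
  // Illustrative usage sketch, comment only -- not part of the generated API. The HINSTANCE / HWND
  // handles and the vk::Instance are assumptions for the example (default `vk` namespace, default
  // dispatcher and exceptions enabled assumed):
  //
  //   vk::Win32SurfaceCreateInfoKHR surfaceCreateInfo( {}, hInstance, hWnd );
  //   vk::SurfaceKHR surface = instance.createWin32SurfaceKHR( surfaceCreateInfo );
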
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
|
|
|
|
struct WriteDescriptorSet
|
|
{
|
|
using NativeType = VkWriteDescriptorSet;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSet;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR WriteDescriptorSet(VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ = {}, const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ ), descriptorType( descriptorType_ ), pImageInfo( pImageInfo_ ), pBufferInfo( pBufferInfo_ ), pTexelBufferView( pTexelBufferView_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR WriteDescriptorSet( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: WriteDescriptorSet( *reinterpret_cast<WriteDescriptorSet const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_, uint32_t dstBinding_, uint32_t dstArrayElement_, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const & imageInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const & bufferInfo_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const & texelBufferView_ = {} )
|
|
: dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( static_cast<uint32_t>( !imageInfo_.empty() ? imageInfo_.size() : !bufferInfo_.empty() ? bufferInfo_.size() : texelBufferView_.size() ) ), descriptorType( descriptorType_ ), pImageInfo( imageInfo_.data() ), pBufferInfo( bufferInfo_.data() ), pTexelBufferView( texelBufferView_.data() )
|
|
{
|
|
#ifdef VULKAN_HPP_NO_EXCEPTIONS
|
|
VULKAN_HPP_ASSERT( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) <= 1);
|
|
#else
|
|
if ( 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) )
|
|
{
|
|
throw LogicError( VULKAN_HPP_NAMESPACE_STRING"::WriteDescriptorSet::WriteDescriptorSet: 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() )" );
|
|
}
|
|
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
WriteDescriptorSet & operator=( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
WriteDescriptorSet & operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSet const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstSet = dstSet_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstBinding = dstBinding_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dstArrayElement = dstArrayElement_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorCount = descriptorCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorType = descriptorType_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pImageInfo = pImageInfo_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
WriteDescriptorSet & setImageInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const & imageInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorCount = static_cast<uint32_t>( imageInfo_.size() );
|
|
pImageInfo = imageInfo_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pBufferInfo = pBufferInfo_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
WriteDescriptorSet & setBufferInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const & bufferInfo_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorCount = static_cast<uint32_t>( bufferInfo_.size() );
|
|
pBufferInfo = bufferInfo_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pTexelBufferView = pTexelBufferView_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
WriteDescriptorSet & setTexelBufferView( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const & texelBufferView_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
descriptorCount = static_cast<uint32_t>( texelBufferView_.size() );
|
|
pTexelBufferView = texelBufferView_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkWriteDescriptorSet const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkWriteDescriptorSet*>( this );
|
|
}
|
|
|
|
explicit operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkWriteDescriptorSet*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DescriptorSet const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DescriptorType const &, const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * const &, const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * const &, const VULKAN_HPP_NAMESPACE::BufferView * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, dstSet, dstBinding, dstArrayElement, descriptorCount, descriptorType, pImageInfo, pBufferInfo, pTexelBufferView );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( WriteDescriptorSet const & ) const = default;
|
|
#else
|
|
bool operator==( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( dstSet == rhs.dstSet )
|
|
&& ( dstBinding == rhs.dstBinding )
|
|
&& ( dstArrayElement == rhs.dstArrayElement )
|
|
&& ( descriptorCount == rhs.descriptorCount )
|
|
&& ( descriptorType == rhs.descriptorType )
|
|
&& ( pImageInfo == rhs.pImageInfo )
|
|
&& ( pBufferInfo == rhs.pBufferInfo )
|
|
&& ( pTexelBufferView == rhs.pTexelBufferView );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
    uint32_t dstBinding = {};
    uint32_t dstArrayElement = {};
    uint32_t descriptorCount = {};
    VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
    const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo = {};
    const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo = {};
    const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView = {};

  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WriteDescriptorSet>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WriteDescriptorSet>::value, "WriteDescriptorSet is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eWriteDescriptorSet>
  {
    using Type = WriteDescriptorSet;
  };
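
  // Illustrative usage sketch, comment only -- not part of the generated API. The ArrayProxy
  // constructor above derives descriptorCount from whichever of the image / buffer / texel-buffer
  // arrays is non-empty (at most one may be non-empty); the device, descriptor set, uniform buffer
  // and Ubo type are assumptions for the example (default `vk` namespace assumed):
  //
  //   vk::DescriptorBufferInfo bufferInfo( uniformBuffer, 0, sizeof( Ubo ) );
  //   vk::WriteDescriptorSet   write( descriptorSet, /*dstBinding*/ 0, /*dstArrayElement*/ 0,
  //                                   vk::DescriptorType::eUniformBuffer, {}, bufferInfo );
  //   device.updateDescriptorSets( write, nullptr );
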
struct WriteDescriptorSetAccelerationStructureKHR
|
|
{
|
|
using NativeType = VkWriteDescriptorSetAccelerationStructureKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetAccelerationStructureKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR(uint32_t accelerationStructureCount_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: accelerationStructureCount( accelerationStructureCount_ ), pAccelerationStructures( pAccelerationStructures_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
WriteDescriptorSetAccelerationStructureKHR( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: WriteDescriptorSetAccelerationStructureKHR( *reinterpret_cast<WriteDescriptorSetAccelerationStructureKHR const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
WriteDescriptorSetAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures_ )
|
|
: accelerationStructureCount( static_cast<uint32_t>( accelerationStructures_.size() ) ), pAccelerationStructures( accelerationStructures_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
WriteDescriptorSetAccelerationStructureKHR & operator=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
WriteDescriptorSetAccelerationStructureKHR & operator=( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR & setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
accelerationStructureCount = accelerationStructureCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR & setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAccelerationStructures = pAccelerationStructures_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
WriteDescriptorSetAccelerationStructureKHR & setAccelerationStructures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
accelerationStructureCount = static_cast<uint32_t>( accelerationStructures_.size() );
|
|
pAccelerationStructures = accelerationStructures_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkWriteDescriptorSetAccelerationStructureKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkWriteDescriptorSetAccelerationStructureKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, accelerationStructureCount, pAccelerationStructures );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( WriteDescriptorSetAccelerationStructureKHR const & ) const = default;
|
|
#else
|
|
bool operator==( WriteDescriptorSetAccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( accelerationStructureCount == rhs.accelerationStructureCount )
|
|
&& ( pAccelerationStructures == rhs.pAccelerationStructures );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureKHR;
    const void * pNext = {};
    uint32_t accelerationStructureCount = {};
    const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures = {};

  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR ) == sizeof( VkWriteDescriptorSetAccelerationStructureKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR>::value, "WriteDescriptorSetAccelerationStructureKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eWriteDescriptorSetAccelerationStructureKHR>
  {
    using Type = WriteDescriptorSetAccelerationStructureKHR;
  };
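
  // Illustrative usage sketch, comment only -- not part of the generated API. This structure is
  // chained into a WriteDescriptorSet whose descriptorCount matches accelerationStructureCount;
  // the device, descriptor set and top-level acceleration structure handle are assumptions for the
  // example (default `vk` namespace assumed):
  //
  //   vk::WriteDescriptorSetAccelerationStructureKHR asWrite( 1, &tlas );
  //   vk::WriteDescriptorSet write( descriptorSet, /*dstBinding*/ 0, /*dstArrayElement*/ 0, 1,
  //                                 vk::DescriptorType::eAccelerationStructureKHR );
  //   write.pNext = &asWrite;
  //   device.updateDescriptorSets( write, nullptr );
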
struct WriteDescriptorSetAccelerationStructureNV
|
|
{
|
|
using NativeType = VkWriteDescriptorSetAccelerationStructureNV;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetAccelerationStructureNV;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV(uint32_t accelerationStructureCount_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: accelerationStructureCount( accelerationStructureCount_ ), pAccelerationStructures( pAccelerationStructures_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
WriteDescriptorSetAccelerationStructureNV( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: WriteDescriptorSetAccelerationStructureNV( *reinterpret_cast<WriteDescriptorSetAccelerationStructureNV const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
WriteDescriptorSetAccelerationStructureNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures_ )
|
|
: accelerationStructureCount( static_cast<uint32_t>( accelerationStructures_.size() ) ), pAccelerationStructures( accelerationStructures_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
WriteDescriptorSetAccelerationStructureNV & operator=( WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
WriteDescriptorSetAccelerationStructureNV & operator=( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV & setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
accelerationStructureCount = accelerationStructureCount_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV & setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pAccelerationStructures = pAccelerationStructures_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
WriteDescriptorSetAccelerationStructureNV & setAccelerationStructures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
accelerationStructureCount = static_cast<uint32_t>( accelerationStructures_.size() );
|
|
pAccelerationStructures = accelerationStructures_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkWriteDescriptorSetAccelerationStructureNV const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureNV*>( this );
|
|
}
|
|
|
|
explicit operator VkWriteDescriptorSetAccelerationStructureNV &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureNV*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, accelerationStructureCount, pAccelerationStructures );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( WriteDescriptorSetAccelerationStructureNV const & ) const = default;
|
|
#else
|
|
bool operator==( WriteDescriptorSetAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( accelerationStructureCount == rhs.accelerationStructureCount )
|
|
&& ( pAccelerationStructures == rhs.pAccelerationStructures );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( WriteDescriptorSetAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureNV;
    const void * pNext = {};
    uint32_t accelerationStructureCount = {};
    const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures = {};

  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV ) == sizeof( VkWriteDescriptorSetAccelerationStructureNV ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV>::value, "WriteDescriptorSetAccelerationStructureNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eWriteDescriptorSetAccelerationStructureNV>
  {
    using Type = WriteDescriptorSetAccelerationStructureNV;
  };
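
  // Illustrative usage sketch, comment only -- not part of the generated API. Analogous to the KHR
  // variant above, but for VK_NV_ray_tracing acceleration structures; the device, descriptor set
  // and acceleration structure handle are assumptions for the example (default `vk` namespace
  // assumed):
  //
  //   vk::WriteDescriptorSetAccelerationStructureNV asWrite( 1, &tlas );
  //   vk::WriteDescriptorSet write( descriptorSet, /*dstBinding*/ 0, /*dstArrayElement*/ 0, 1,
  //                                 vk::DescriptorType::eAccelerationStructureNV );
  //   write.pNext = &asWrite;
  //   device.updateDescriptorSets( write, nullptr );
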
struct WriteDescriptorSetInlineUniformBlock
|
|
{
|
|
using NativeType = VkWriteDescriptorSetInlineUniformBlock;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetInlineUniformBlock;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlock(uint32_t dataSize_ = {}, const void * pData_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: dataSize( dataSize_ ), pData( pData_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlock( WriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
WriteDescriptorSetInlineUniformBlock( VkWriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: WriteDescriptorSetInlineUniformBlock( *reinterpret_cast<WriteDescriptorSetInlineUniformBlock const *>( &rhs ) )
|
|
{}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
template <typename T>
|
|
WriteDescriptorSetInlineUniformBlock( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ )
|
|
: dataSize( static_cast<uint32_t>( data_.size() * sizeof(T) ) ), pData( data_.data() )
|
|
{}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
WriteDescriptorSetInlineUniformBlock & operator=( WriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
WriteDescriptorSetInlineUniformBlock & operator=( VkWriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock & setDataSize( uint32_t dataSize_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dataSize = dataSize_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pData = pData_;
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
|
|
template <typename T>
|
|
WriteDescriptorSetInlineUniformBlock & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dataSize = static_cast<uint32_t>( data_.size() * sizeof(T) );
|
|
pData = data_.data();
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkWriteDescriptorSetInlineUniformBlock const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkWriteDescriptorSetInlineUniformBlock*>( this );
|
|
}
|
|
|
|
explicit operator VkWriteDescriptorSetInlineUniformBlock &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkWriteDescriptorSetInlineUniformBlock*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const void * const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, dataSize, pData );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
auto operator<=>( WriteDescriptorSetInlineUniformBlock const & ) const = default;
|
|
#else
|
|
bool operator==( WriteDescriptorSetInlineUniformBlock const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
return this->reflect() == rhs.reflect();
|
|
#else
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( dataSize == rhs.dataSize )
|
|
&& ( pData == rhs.pData );
|
|
#endif
|
|
}
|
|
|
|
bool operator!=( WriteDescriptorSetInlineUniformBlock const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
#endif
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetInlineUniformBlock;
    const void * pNext = {};
    uint32_t dataSize = {};
    const void * pData = {};

  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock ) == sizeof( VkWriteDescriptorSetInlineUniformBlock ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock>::value, "WriteDescriptorSetInlineUniformBlock is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eWriteDescriptorSetInlineUniformBlock>
  {
    using Type = WriteDescriptorSetInlineUniformBlock;
  };
  using WriteDescriptorSetInlineUniformBlockEXT = WriteDescriptorSetInlineUniformBlock;
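
  // Illustrative usage sketch, comment only -- not part of the generated API. For inline uniform
  // blocks the chained dataSize / pData take the place of a buffer, and the parent
  // WriteDescriptorSet's descriptorCount is the number of bytes written; the device, descriptor set
  // and data layout are assumptions for the example (default `vk` namespace assumed):
  //
  //   struct Constants { float scale[4]; } constants = {};
  //   vk::WriteDescriptorSetInlineUniformBlock inlineWrite( sizeof( constants ), &constants );
  //   vk::WriteDescriptorSet write( descriptorSet, /*dstBinding*/ 0, /*dstArrayElement*/ 0,
  //                                 sizeof( constants ), vk::DescriptorType::eInlineUniformBlock );
  //   write.pNext = &inlineWrite;
  //   device.updateDescriptorSets( write, nullptr );
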
#if defined( VK_USE_PLATFORM_XCB_KHR )
|
|
struct XcbSurfaceCreateInfoKHR
|
|
{
|
|
using NativeType = VkXcbSurfaceCreateInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXcbSurfaceCreateInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ = {}, xcb_connection_t * connection_ = {}, xcb_window_t window_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), connection( connection_ ), window( window_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: XcbSurfaceCreateInfoKHR( *reinterpret_cast<XcbSurfaceCreateInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
XcbSurfaceCreateInfoKHR & operator=( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
XcbSurfaceCreateInfoKHR & operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setConnection( xcb_connection_t * connection_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
connection = connection_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setWindow( xcb_window_t window_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
window = window_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkXcbSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkXcbSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkXcbSurfaceCreateInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR const &, xcb_connection_t * const &, xcb_window_t const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, connection, window );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
std::strong_ordering operator<=>( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
|
|
if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
|
|
if ( auto cmp = connection <=> rhs.connection; cmp != 0 ) return cmp;
|
|
if ( auto cmp = memcmp( &window, &rhs.window, sizeof( xcb_window_t ) ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( connection == rhs.connection )
|
|
&& ( memcmp( &window, &rhs.window, sizeof( xcb_window_t ) ) == 0 );
|
|
}
|
|
|
|
bool operator!=( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXcbSurfaceCreateInfoKHR;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags = {};
    xcb_connection_t * connection = {};
    xcb_window_t window = {};

  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR>::value, "XcbSurfaceCreateInfoKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eXcbSurfaceCreateInfoKHR>
  {
    using Type = XcbSurfaceCreateInfoKHR;
  };
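
  // Illustrative usage sketch, comment only -- not part of the generated API. The xcb connection
  // and window, and the vk::Instance, are assumptions for the example (default `vk` namespace,
  // default dispatcher and exceptions enabled assumed):
  //
  //   vk::XcbSurfaceCreateInfoKHR surfaceCreateInfo( {}, connection, window );
  //   vk::SurfaceKHR surface = instance.createXcbSurfaceKHR( surfaceCreateInfo );
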
#endif /*VK_USE_PLATFORM_XCB_KHR*/
|
|
|
|
#if defined( VK_USE_PLATFORM_XLIB_KHR )
|
|
struct XlibSurfaceCreateInfoKHR
|
|
{
|
|
using NativeType = VkXlibSurfaceCreateInfoKHR;
|
|
|
|
static const bool allowDuplicate = false;
|
|
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXlibSurfaceCreateInfoKHR;
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
|
|
VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ = {}, Display * dpy_ = {}, Window window_ = {}) VULKAN_HPP_NOEXCEPT
|
|
: flags( flags_ ), dpy( dpy_ ), window( window_ )
|
|
{}
|
|
|
|
VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
: XlibSurfaceCreateInfoKHR( *reinterpret_cast<XlibSurfaceCreateInfoKHR const *>( &rhs ) )
|
|
{}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
|
|
|
|
XlibSurfaceCreateInfoKHR & operator=( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
|
|
|
|
XlibSurfaceCreateInfoKHR & operator=( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const *>( &rhs );
|
|
return *this;
|
|
}
|
|
|
|
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
|
|
VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
pNext = pNext_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
flags = flags_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & setDpy( Display * dpy_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
dpy = dpy_;
|
|
return *this;
|
|
}
|
|
|
|
VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & setWindow( Window window_ ) VULKAN_HPP_NOEXCEPT
|
|
{
|
|
window = window_;
|
|
return *this;
|
|
}
|
|
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
|
|
|
|
|
|
explicit operator VkXlibSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( this );
|
|
}
|
|
|
|
explicit operator VkXlibSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return *reinterpret_cast<VkXlibSurfaceCreateInfoKHR*>( this );
|
|
}
|
|
|
|
#if defined( VULKAN_HPP_USE_REFLECT )
|
|
#if 14 <= VULKAN_HPP_CPP_VERSION
|
|
auto
|
|
#else
|
|
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR const &, Display * const &, Window const &>
|
|
#endif
|
|
reflect() const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return std::tie( sType, pNext, flags, dpy, window );
|
|
}
|
|
#endif
|
|
|
|
|
|
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
|
|
std::strong_ordering operator<=>( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp;
|
|
if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp;
|
|
if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp;
|
|
if ( auto cmp = dpy <=> rhs.dpy; cmp != 0 ) return cmp;
|
|
if ( auto cmp = memcmp( &window, &rhs.window, sizeof( Window ) ); cmp != 0 )
|
|
return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
|
|
|
|
return std::strong_ordering::equivalent;
|
|
}
|
|
#endif
|
|
|
|
bool operator==( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return ( sType == rhs.sType )
|
|
&& ( pNext == rhs.pNext )
|
|
&& ( flags == rhs.flags )
|
|
&& ( dpy == rhs.dpy )
|
|
&& ( memcmp( &window, &rhs.window, sizeof( Window ) ) == 0 );
|
|
}
|
|
|
|
bool operator!=( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
|
|
{
|
|
return !operator==( rhs );
|
|
}
|
|
|
|
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXlibSurfaceCreateInfoKHR;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags = {};
    Display * dpy = {};
    Window window = {};

  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR>::value, "XlibSurfaceCreateInfoKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eXlibSurfaceCreateInfoKHR>
  {
    using Type = XlibSurfaceCreateInfoKHR;
  };
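
  // Illustrative usage sketch, comment only -- not part of the generated API. The X11 Display and
  // Window, and the vk::Instance, are assumptions for the example (default `vk` namespace, default
  // dispatcher and exceptions enabled assumed):
  //
  //   vk::XlibSurfaceCreateInfoKHR surfaceCreateInfo( {}, display, window );
  //   vk::SurfaceKHR surface = instance.createXlibSurfaceKHR( surfaceCreateInfo );
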
#endif /*VK_USE_PLATFORM_XLIB_KHR*/

} // namespace VULKAN_HPP_NAMESPACE

#endif