diff --git a/Vulkan-Headers b/Vulkan-Headers index 9b9fd87..63af1cf 160000 --- a/Vulkan-Headers +++ b/Vulkan-Headers @@ -1 +1 @@ -Subproject commit 9b9fd871b08110cd8f0b74e721b03213d9cc3081 +Subproject commit 63af1cf1ee906ba4dcd5a324bdd0201d4f4bfd12 diff --git a/vulkan/vulkan.hpp b/vulkan/vulkan.hpp index 2b7c801..a8037e8 100644 --- a/vulkan/vulkan.hpp +++ b/vulkan/vulkan.hpp @@ -114,7 +114,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h # include #endif -static_assert( VK_HEADER_VERSION == 245, "Wrong VK_HEADER_VERSION!" ); +static_assert( VK_HEADER_VERSION == 246, "Wrong VK_HEADER_VERSION!" ); // 32-bit vulkan is not typesafe for non-dispatchable handles, so don't allow copy constructors on this platform by default. // To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION @@ -5854,6 +5854,35 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdOpticalFlowExecuteNV( commandBuffer, session, pExecuteInfo ); } + //=== VK_EXT_shader_object === + + VkResult vkCreateShadersEXT( VkDevice device, + uint32_t createInfoCount, + const VkShaderCreateInfoEXT * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkShaderEXT * pShaders ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateShadersEXT( device, createInfoCount, pCreateInfos, pAllocator, pShaders ); + } + + void vkDestroyShaderEXT( VkDevice device, VkShaderEXT shader, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyShaderEXT( device, shader, pAllocator ); + } + + VkResult vkGetShaderBinaryDataEXT( VkDevice device, VkShaderEXT shader, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetShaderBinaryDataEXT( device, shader, pDataSize, pData ); + } + + void vkCmdBindShadersEXT( VkCommandBuffer commandBuffer, + uint32_t stageCount, + const VkShaderStageFlagBits * pStages, + const VkShaderEXT * pShaders ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindShadersEXT( commandBuffer, stageCount, pStages, pShaders ); + } + //=== VK_QCOM_tile_properties === VkResult vkGetFramebufferTilePropertiesQCOM( VkDevice device, @@ -6543,6 +6572,14 @@ namespace VULKAN_HPP_NAMESPACE CompressionExhaustedEXTError( char const * message ) : SystemError( make_error_code( Result::eErrorCompressionExhaustedEXT ), message ) {} }; + class IncompatibleShaderBinaryEXTError : public SystemError + { + public: + IncompatibleShaderBinaryEXTError( std::string const & message ) : SystemError( make_error_code( Result::eErrorIncompatibleShaderBinaryEXT ), message ) {} + + IncompatibleShaderBinaryEXTError( char const * message ) : SystemError( make_error_code( Result::eErrorIncompatibleShaderBinaryEXT ), message ) {} + }; + namespace { [[noreturn]] void throwResultException( Result result, char const * message ) @@ -6587,6 +6624,7 @@ namespace VULKAN_HPP_NAMESPACE case Result::eErrorInvalidVideoStdParametersKHR: throw InvalidVideoStdParametersKHRError( message ); # endif /*VK_ENABLE_BETA_EXTENSIONS*/ case Result::eErrorCompressionExhaustedEXT: throw CompressionExhaustedEXTError( message ); + case Result::eErrorIncompatibleShaderBinaryEXT: throw IncompatibleShaderBinaryEXTError( message ); default: throw SystemError( make_error_code( result ), message ); } } @@ -7971,6 +8009,15 @@ namespace VULKAN_HPP_NAMESPACE }; }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> struct StructExtends { @@ -12257,6 +12304,34 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_EXT_shader_tile_image === + template <> + 
struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_EXT_opacity_micromap === template <> struct StructExtends @@ -12913,6 +12988,34 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_EXT_shader_object === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_QCOM_tile_properties === template <> struct StructExtends @@ -14263,6 +14366,12 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0; PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0; + //=== VK_EXT_shader_object === + PFN_vkCreateShadersEXT vkCreateShadersEXT = 0; + PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0; + PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT = 0; + PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT = 0; + //=== VK_QCOM_tile_properties === PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0; PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0; @@ -15545,6 +15654,12 @@ namespace VULKAN_HPP_NAMESPACE vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetInstanceProcAddr( instance, "vkBindOpticalFlowSessionImageNV" ) ); vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetInstanceProcAddr( instance, "vkCmdOpticalFlowExecuteNV" ) ); + //=== VK_EXT_shader_object === + vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetInstanceProcAddr( instance, "vkCreateShadersEXT" ) ); + vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetInstanceProcAddr( instance, "vkDestroyShaderEXT" ) ); + vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetInstanceProcAddr( instance, "vkGetShaderBinaryDataEXT" ) ); + vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindShadersEXT" ) ); + //=== VK_QCOM_tile_properties === vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetInstanceProcAddr( instance, "vkGetFramebufferTilePropertiesQCOM" ) ); vkGetDynamicRenderingTilePropertiesQCOM = @@ -16496,6 +16611,12 @@ namespace VULKAN_HPP_NAMESPACE vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) ); vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) ); + //=== VK_EXT_shader_object === + vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) ); + vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) ); + vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetDeviceProcAddr( device, "vkGetShaderBinaryDataEXT" ) ); + vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetDeviceProcAddr( device, "vkCmdBindShadersEXT" ) ); + //=== VK_QCOM_tile_properties === vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) ); vkGetDynamicRenderingTilePropertiesQCOM = diff --git a/vulkan/vulkan_enums.hpp b/vulkan/vulkan_enums.hpp index f1c425b..fa351b9 100644 --- a/vulkan/vulkan_enums.hpp +++ b/vulkan/vulkan_enums.hpp @@ -80,7 +80,8 @@ namespace 
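// Example (not part of the patch): a minimal sketch of initializing the dynamic dispatcher so that
// the VK_EXT_shader_object entry points added above (vkCreateShadersEXT, vkDestroyShaderEXT,
// vkGetShaderBinaryDataEXT, vkCmdBindShadersEXT) get resolved through vkGetInstanceProcAddr /
// vkGetDeviceProcAddr. Extension commands are typically not exported by the Vulkan loader, so the
// dynamic dispatcher is the portable route. Assumes VULKAN_HPP_DISPATCH_LOADER_DYNAMIC=1 and that
// `instance` and `device` are created elsewhere with VK_EXT_shader_object enabled.
#define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1
#include <vulkan/vulkan.hpp>

VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE

void initShaderObjectDispatch( vk::Instance instance, vk::Device device )
{
  static vk::DynamicLoader dl;
  VULKAN_HPP_DEFAULT_DISPATCHER.init( dl.getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" ) );
  VULKAN_HPP_DEFAULT_DISPATCHER.init( instance );  // loads instance-level and initial device-level pointers
  VULKAN_HPP_DEFAULT_DISPATCHER.init( device );    // refines device-level pointers, including the four above
}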
VULKAN_HPP_NAMESPACE #if defined( VK_ENABLE_BETA_EXTENSIONS ) eErrorInvalidVideoStdParametersKHR = VK_ERROR_INVALID_VIDEO_STD_PARAMETERS_KHR, #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - eErrorCompressionExhaustedEXT = VK_ERROR_COMPRESSION_EXHAUSTED_EXT + eErrorCompressionExhaustedEXT = VK_ERROR_COMPRESSION_EXHAUSTED_EXT, + eErrorIncompatibleShaderBinaryEXT = VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT }; enum class StructureType @@ -989,6 +990,8 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceMultiDrawFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT, ePhysicalDeviceMultiDrawPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT, ePhysicalDeviceImage2DViewOf3DFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT, + ePhysicalDeviceShaderTileImageFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_FEATURES_EXT, + ePhysicalDeviceShaderTileImagePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_PROPERTIES_EXT, eMicromapBuildInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT, eMicromapVersionInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_VERSION_INFO_EXT, eCopyMicromapInfoEXT = VK_STRUCTURE_TYPE_COPY_MICROMAP_INFO_EXT, @@ -1055,6 +1058,10 @@ namespace VULKAN_HPP_NAMESPACE eOpticalFlowSessionCreatePrivateDataInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV, ePhysicalDeviceLegacyDitheringFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT, ePhysicalDevicePipelineProtectedAccessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT, + ePhysicalDeviceShaderObjectFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT, + ePhysicalDeviceShaderObjectPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT, + eShaderCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT, + eShaderRequiredSubgroupSizeCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT, ePhysicalDeviceTilePropertiesFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM, eTilePropertiesQCOM = VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM, ePhysicalDeviceAmigoProfilingFeaturesSEC = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC, @@ -1130,7 +1137,8 @@ namespace VULKAN_HPP_NAMESPACE eBufferCollectionFUCHSIA = VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA, #endif /*VK_USE_PLATFORM_FUCHSIA*/ eMicromapEXT = VK_OBJECT_TYPE_MICROMAP_EXT, - eOpticalFlowSessionNV = VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV + eOpticalFlowSessionNV = VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV, + eShaderEXT = VK_OBJECT_TYPE_SHADER_EXT }; enum class VendorId @@ -6471,6 +6479,37 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowExecuteFlagsNV allFlags = OpticalFlowExecuteFlagBitsNV::eDisableTemporalHints; }; + //=== VK_EXT_shader_object === + + enum class ShaderCreateFlagBitsEXT : VkShaderCreateFlagsEXT + { + eLinkStage = VK_SHADER_CREATE_LINK_STAGE_BIT_EXT, + eAllowVaryingSubgroupSize = VK_SHADER_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT, + eRequireFullSubgroups = VK_SHADER_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT, + eNoTaskShader = VK_SHADER_CREATE_NO_TASK_SHADER_BIT_EXT, + eDispatchBase = VK_SHADER_CREATE_DISPATCH_BASE_BIT_EXT, + eFragmentShadingRateAttachment = VK_SHADER_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_EXT, + eFragmentDensityMapAttachment = VK_SHADER_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT + }; + + using ShaderCreateFlagsEXT = Flags; + + template <> + struct 
FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ShaderCreateFlagsEXT allFlags = + ShaderCreateFlagBitsEXT::eLinkStage | ShaderCreateFlagBitsEXT::eAllowVaryingSubgroupSize | ShaderCreateFlagBitsEXT::eRequireFullSubgroups | + ShaderCreateFlagBitsEXT::eNoTaskShader | ShaderCreateFlagBitsEXT::eDispatchBase | ShaderCreateFlagBitsEXT::eFragmentShadingRateAttachment | + ShaderCreateFlagBitsEXT::eFragmentDensityMapAttachment; + }; + + enum class ShaderCodeTypeEXT + { + eBinary = VK_SHADER_CODE_TYPE_BINARY_EXT, + eSpirv = VK_SHADER_CODE_TYPE_SPIRV_EXT + }; + //=== VK_NV_ray_tracing_invocation_reorder === enum class RayTracingInvocationReorderModeNV diff --git a/vulkan/vulkan_extension_inspection.hpp b/vulkan/vulkan_extension_inspection.hpp index ca36ac6..829aadb 100644 --- a/vulkan/vulkan_extension_inspection.hpp +++ b/vulkan/vulkan_extension_inspection.hpp @@ -223,7 +223,8 @@ namespace VULKAN_HPP_NAMESPACE ( name == "VK_EXT_pipeline_properties" ) || ( name == "VK_EXT_multisampled_render_to_single_sampled" ) || ( name == "VK_EXT_extended_dynamic_state2" ) || ( name == "VK_EXT_color_write_enable" ) || ( name == "VK_EXT_primitives_generated_query" ) || ( name == "VK_KHR_ray_tracing_maintenance1" ) || ( name == "VK_EXT_global_priority_query" ) || ( name == "VK_EXT_image_view_min_lod" ) || - ( name == "VK_EXT_multi_draw" ) || ( name == "VK_EXT_image_2d_view_of_3d" ) || ( name == "VK_EXT_opacity_micromap" ) || + ( name == "VK_EXT_multi_draw" ) || ( name == "VK_EXT_image_2d_view_of_3d" ) || ( name == "VK_EXT_shader_tile_image" ) || + ( name == "VK_EXT_opacity_micromap" ) || #if defined( VK_ENABLE_BETA_EXTENSIONS ) ( name == "VK_NV_displacement_micromap" ) || #endif /*VK_ENABLE_BETA_EXTENSIONS*/ @@ -235,8 +236,8 @@ namespace VULKAN_HPP_NAMESPACE ( name == "VK_EXT_image_compression_control_swapchain" ) || ( name == "VK_QCOM_image_processing" ) || ( name == "VK_EXT_extended_dynamic_state3" ) || ( name == "VK_EXT_subpass_merge_feedback" ) || ( name == "VK_EXT_shader_module_identifier" ) || ( name == "VK_EXT_rasterization_order_attachment_access" ) || ( name == "VK_NV_optical_flow" ) || ( name == "VK_EXT_legacy_dithering" ) || - ( name == "VK_EXT_pipeline_protected_access" ) || ( name == "VK_QCOM_tile_properties" ) || ( name == "VK_SEC_amigo_profiling" ) || - ( name == "VK_QCOM_multiview_per_view_viewports" ) || ( name == "VK_NV_ray_tracing_invocation_reorder" ) || + ( name == "VK_EXT_pipeline_protected_access" ) || ( name == "VK_EXT_shader_object" ) || ( name == "VK_QCOM_tile_properties" ) || + ( name == "VK_SEC_amigo_profiling" ) || ( name == "VK_QCOM_multiview_per_view_viewports" ) || ( name == "VK_NV_ray_tracing_invocation_reorder" ) || ( name == "VK_EXT_mutable_descriptor_type" ) || ( name == "VK_ARM_shader_core_builtins" ) || ( name == "VK_EXT_pipeline_library_group_handles" ) || ( name == "VK_QCOM_multiview_per_view_render_areas" ); } diff --git a/vulkan/vulkan_funcs.hpp b/vulkan/vulkan_funcs.hpp index bbfc036..3dccc24 100644 --- a/vulkan/vulkan_funcs.hpp +++ b/vulkan/vulkan_funcs.hpp @@ -21517,6 +21517,317 @@ namespace VULKAN_HPP_NAMESPACE } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_EXT_shader_object === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShadersEXT( uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders, + Dispatch const & d ) const 
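// Example (not part of the patch): a minimal sketch of filling vk::ShaderCreateInfoEXT and creating a
// linked vertex/fragment pair through the enhanced Device::createShadersEXT wrapper that follows.
// The SPIR-V blobs and the descriptor set layout are caller-provided placeholders; with exceptions
// enabled the call returns std::vector<vk::ShaderEXT> and throws on failure. The shaders must later
// be destroyed with Device::destroyShaderEXT.
#include <cstdint>
#include <vector>
#include <vulkan/vulkan.hpp>

std::vector<vk::ShaderEXT> createLinkedVertFrag( vk::Device                    device,
                                                 std::vector<uint32_t> const & vertSpirv,
                                                 std::vector<uint32_t> const & fragSpirv,
                                                 vk::DescriptorSetLayout       setLayout )
{
  vk::ShaderCreateInfoEXT vertInfo;
  vertInfo.setFlags( vk::ShaderCreateFlagBitsEXT::eLinkStage )
    .setStage( vk::ShaderStageFlagBits::eVertex )
    .setNextStage( vk::ShaderStageFlagBits::eFragment )
    .setCodeType( vk::ShaderCodeTypeEXT::eSpirv )
    .setCodeSize( vertSpirv.size() * sizeof( uint32_t ) )
    .setPCode( vertSpirv.data() )
    .setPName( "main" )
    .setSetLayoutCount( 1 )
    .setPSetLayouts( &setLayout );

  vk::ShaderCreateInfoEXT fragInfo = vertInfo;  // same flags, layout, and entry point
  fragInfo.setStage( vk::ShaderStageFlagBits::eFragment )
    .setNextStage( {} )
    .setCodeSize( fragSpirv.size() * sizeof( uint32_t ) )
    .setPCode( fragSpirv.data() );

  return device.createShadersEXT( { vertInfo, fragInfo } );
}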
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateShadersEXT( m_device, + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pShaders ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector shaders( createInfos.size() ); + VkResult result = + d.vkCreateShadersEXT( m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( shaders.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT" ); + + return createResultValueType( static_cast( result ), shaders ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + ShaderEXTAllocator & shaderEXTAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector shaders( createInfos.size(), shaderEXTAllocator ); + VkResult result = + d.vkCreateShadersEXT( m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( shaders.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT" ); + + return createResultValueType( static_cast( result ), shaders ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createShaderEXT( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::ShaderEXT shader; + VkResult result = + d.vkCreateShadersEXT( m_device, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &shader ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXT" ); + + return createResultValueType( static_cast( result ), shader ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType, ShaderEXTAllocator>>::type + Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector shaders( createInfos.size() ); + VkResult result = + d.vkCreateShadersEXT( m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( shaders.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique" ); + std::vector, ShaderEXTAllocator> uniqueShaders; + uniqueShaders.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & shader : shaders ) + { + uniqueShaders.push_back( UniqueHandle( shader, deleter ) ); + } + return createResultValueType( static_cast( result ), std::move( 
uniqueShaders ) ); + } + + template >::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType, ShaderEXTAllocator>>::type + Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + ShaderEXTAllocator & shaderEXTAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector shaders( createInfos.size() ); + VkResult result = + d.vkCreateShadersEXT( m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( shaders.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique" ); + std::vector, ShaderEXTAllocator> uniqueShaders( shaderEXTAllocator ); + uniqueShaders.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & shader : shaders ) + { + uniqueShaders.push_back( UniqueHandle( shader, deleter ) ); + } + return createResultValueType( static_cast( result ), std::move( uniqueShaders ) ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createShaderEXTUnique( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::ShaderEXT shader; + VkResult result = + d.vkCreateShadersEXT( m_device, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &shader ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXTUnique" ); + + return createResultValueType( static_cast( result ), + UniqueHandle( shader, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyShaderEXT( m_device, static_cast( shader ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyShaderEXT( m_device, + static_cast( shader ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyShaderEXT( m_device, static_cast( shader ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyShaderEXT( m_device, + static_cast( shader ), + reinterpret_cast( 
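// Example (not part of the patch): a sketch of the smart-handle variants declared above. With
// VULKAN_HPP_NO_SMART_HANDLE left undefined, createShaderEXTUnique wraps the new handle in
// vk::UniqueShaderEXT, so vkDestroyShaderEXT runs automatically when the handle leaves scope;
// the manual equivalent is createShaderEXT followed by destroyShaderEXT (or the destroy overload).
#include <vulkan/vulkan.hpp>

void useUniqueShader( vk::Device device, vk::ShaderCreateInfoEXT const & createInfo )
{
  vk::UniqueShaderEXT shader = device.createShaderEXTUnique( createInfo );

  // ... record work that binds *shader ...

}  // ~UniqueShaderEXT calls vkDestroyShaderEXT on the wrapped handle here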
static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, size_t * pDataSize, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetShaderBinaryDataEXT( m_device, static_cast( shader ), pDataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector data; + size_t dataSize; + VkResult result; + do + { + result = d.vkGetShaderBinaryDataEXT( m_device, static_cast( shader ), &dataSize, nullptr ); + if ( ( result == VK_SUCCESS ) && dataSize ) + { + data.resize( dataSize ); + result = d.vkGetShaderBinaryDataEXT( m_device, static_cast( shader ), &dataSize, reinterpret_cast( data.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + return createResultValueType( static_cast( result ), data ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector data( uint8_tAllocator ); + size_t dataSize; + VkResult result; + do + { + result = d.vkGetShaderBinaryDataEXT( m_device, static_cast( shader ), &dataSize, nullptr ); + if ( ( result == VK_SUCCESS ) && dataSize ) + { + data.resize( dataSize ); + result = d.vkGetShaderBinaryDataEXT( m_device, static_cast( shader ), &dataSize, reinterpret_cast( data.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + return createResultValueType( static_cast( result ), data ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( uint32_t stageCount, + const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits * pStages, + const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindShadersEXT( + m_commandBuffer, stageCount, reinterpret_cast( pStages ), reinterpret_cast( pShaders ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & stages, + VULKAN_HPP_NAMESPACE::ArrayProxy const & shaders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( stages.size() == shaders.size() ); +# else + if ( stages.size() != shaders.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING 
"::CommandBuffer::bindShadersEXT: stages.size() != shaders.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBindShadersEXT( m_commandBuffer, + stages.size(), + reinterpret_cast( stages.data() ), + reinterpret_cast( shaders.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_QCOM_tile_properties === template diff --git a/vulkan/vulkan_handles.hpp b/vulkan/vulkan_handles.hpp index 0c4b8cf..15ee96b 100644 --- a/vulkan/vulkan_handles.hpp +++ b/vulkan/vulkan_handles.hpp @@ -433,6 +433,7 @@ namespace VULKAN_HPP_NAMESPACE using PhysicalDeviceSubgroupSizeControlPropertiesEXT = PhysicalDeviceSubgroupSizeControlProperties; struct PipelineShaderStageRequiredSubgroupSizeCreateInfo; using PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo; + using ShaderRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo; struct PhysicalDeviceInlineUniformBlockFeatures; using PhysicalDeviceInlineUniformBlockFeaturesEXT = PhysicalDeviceInlineUniformBlockFeatures; struct PhysicalDeviceInlineUniformBlockProperties; @@ -1433,6 +1434,10 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_image_2d_view_of_3d === struct PhysicalDeviceImage2DViewOf3DFeaturesEXT; + //=== VK_EXT_shader_tile_image === + struct PhysicalDeviceShaderTileImageFeaturesEXT; + struct PhysicalDeviceShaderTileImagePropertiesEXT; + //=== VK_EXT_opacity_micromap === struct MicromapBuildInfoEXT; struct MicromapUsageEXT; @@ -1553,6 +1558,11 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_pipeline_protected_access === struct PhysicalDevicePipelineProtectedAccessFeaturesEXT; + //=== VK_EXT_shader_object === + struct PhysicalDeviceShaderObjectFeaturesEXT; + struct PhysicalDeviceShaderObjectPropertiesEXT; + struct ShaderCreateInfoEXT; + //=== VK_QCOM_tile_properties === struct PhysicalDeviceTilePropertiesFeaturesQCOM; struct TilePropertiesQCOM; @@ -1678,6 +1688,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_optical_flow === class OpticalFlowSessionNV; + //=== VK_EXT_shader_object === + class ShaderEXT; + #ifndef VULKAN_HPP_NO_SMART_HANDLE //====================== //=== UNIQUE HANDLEs === @@ -2082,6 +2095,16 @@ namespace VULKAN_HPP_NAMESPACE }; using UniqueOpticalFlowSessionNV = UniqueHandle; + + //=== VK_EXT_shader_object === + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueShaderEXT = UniqueHandle; #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ //=============== @@ -3309,6 +3332,87 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; + class ShaderEXT + { + public: + using CType = VkShaderEXT; + using NativeType = VkShaderEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR ShaderEXT() = default; + + VULKAN_HPP_CONSTEXPR ShaderEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT ShaderEXT( VkShaderEXT shaderEXT ) VULKAN_HPP_NOEXCEPT : m_shaderEXT( shaderEXT ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + ShaderEXT & operator=( VkShaderEXT shaderEXT ) VULKAN_HPP_NOEXCEPT + { + m_shaderEXT = shaderEXT; + return *this; + } +#endif + + ShaderEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_shaderEXT = {}; + return *this; + } + +#if 
defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderEXT const & ) const = default; +#else + bool operator==( ShaderEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_shaderEXT == rhs.m_shaderEXT; + } + + bool operator!=( ShaderEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_shaderEXT != rhs.m_shaderEXT; + } + + bool operator<( ShaderEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_shaderEXT < rhs.m_shaderEXT; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderEXT() const VULKAN_HPP_NOEXCEPT + { + return m_shaderEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_shaderEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_shaderEXT == VK_NULL_HANDLE; + } + + private: + VkShaderEXT m_shaderEXT = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ShaderEXT; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class Image { public: @@ -6093,6 +6197,20 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_EXT_shader_object === + + template + void bindShadersEXT( uint32_t stageCount, + const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits * pStages, + const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & stages, + VULKAN_HPP_NAMESPACE::ArrayProxy const & shaders, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT { return m_commandBuffer; @@ -12637,6 +12755,97 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + //=== VK_EXT_shader_object === + + template + VULKAN_HPP_NODISCARD Result createShadersEXT( uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B0 = ShaderEXTAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + ShaderEXTAllocator & shaderEXTAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createShaderEXT( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >> + VULKAN_HPP_NODISCARD typename ResultValueType, ShaderEXTAllocator>>::type + createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template >, + typename B0 = ShaderEXTAllocator, + typename std::enable_if>::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, ShaderEXTAllocator>>::type + createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + ShaderEXTAllocator & shaderEXTAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createShaderEXTUnique( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + size_t * pDataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = Uint8_tAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getShaderBinaryDataEXT( + VULKAN_HPP_NAMESPACE::ShaderEXT shader, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_QCOM_tile_properties === template diff --git a/vulkan/vulkan_hash.hpp b/vulkan/vulkan_hash.hpp index 031883f..86eeecf 100644 --- a/vulkan/vulkan_hash.hpp +++ b/vulkan/vulkan_hash.hpp @@ -492,6 +492,17 @@ namespace std } }; + //=== VK_EXT_shader_object === + + template <> + struct hash + { + std::size_t 
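// Example (not part of the patch): a sketch of the binary round-trip declared above.
// getShaderBinaryDataEXT returns an opaque blob (std::vector<uint8_t> in enhanced mode) that can be
// cached; recreating from it with vk::ShaderCodeTypeEXT::eBinary throws the new
// vk::IncompatibleShaderBinaryEXTError when the blob no longer matches the implementation
// (shaderBinaryUUID / shaderBinaryVersion in vk::PhysicalDeviceShaderObjectPropertiesEXT can be
// compared up front to skip the attempt). The non-code members of the create-info are placeholders
// and have to describe the same shader the blob was retrieved from.
#include <cstdint>
#include <vector>
#include <vulkan/vulkan.hpp>

std::vector<uint8_t> saveShaderBinary( vk::Device device, vk::ShaderEXT shader )
{
  return device.getShaderBinaryDataEXT( shader );
}

bool tryRestoreShader( vk::Device device, std::vector<uint8_t> const & blob, vk::ShaderEXT & out )
{
  vk::ShaderCreateInfoEXT info;
  info.setStage( vk::ShaderStageFlagBits::eVertex )  // placeholder: must match the cached shader
    .setCodeType( vk::ShaderCodeTypeEXT::eBinary )
    .setCodeSize( blob.size() )
    .setPCode( blob.data() )
    .setPName( "main" );
  try
  {
    out = device.createShaderEXT( info );
    return true;
  }
  catch ( vk::IncompatibleShaderBinaryEXTError const & )
  {
    return false;  // fall back to recompiling from SPIR-V
  }
}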
operator()( VULKAN_HPP_NAMESPACE::ShaderEXT const & shaderEXT ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( shaderEXT ) ); + } + }; + #if 14 <= VULKAN_HPP_CPP_VERSION //====================================== //=== HASH structures for structures === @@ -10074,6 +10085,38 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT const & physicalDeviceShaderObjectFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectFeaturesEXT.shaderObject ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT const & physicalDeviceShaderObjectPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectPropertiesEXT.pNext ); + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectPropertiesEXT.shaderBinaryUUID[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectPropertiesEXT.shaderBinaryVersion ); + return seed; + } + }; + template <> struct hash { @@ -10145,6 +10188,38 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT const & physicalDeviceShaderTileImageFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.shaderTileImageColorReadAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.shaderTileImageDepthReadAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.shaderTileImageStencilReadAccess ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT const & physicalDeviceShaderTileImagePropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.shaderTileImageCoherentReadAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.shaderTileImageReadSampleFromPixelRateInvocation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.shaderTileImageReadFromHelperInvocation ); + return seed; + } + }; + template <> struct hash { @@ -12680,6 +12755,33 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & shaderCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, 
shaderCreateInfoEXT.stage ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.nextStage ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.codeType ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.codeSize ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pCode ); + for ( const char * p = shaderCreateInfoEXT.pName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.setLayoutCount ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pSetLayouts ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pushConstantRangeCount ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pPushConstantRanges ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pSpecializationInfo ); + return seed; + } + }; + template <> struct hash { diff --git a/vulkan/vulkan_raii.hpp b/vulkan/vulkan_raii.hpp index 7fd612a..1d608c4 100644 --- a/vulkan/vulkan_raii.hpp +++ b/vulkan/vulkan_raii.hpp @@ -1593,6 +1593,12 @@ namespace VULKAN_HPP_NAMESPACE vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) ); vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) ); + //=== VK_EXT_shader_object === + vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) ); + vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) ); + vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetDeviceProcAddr( device, "vkGetShaderBinaryDataEXT" ) ); + vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetDeviceProcAddr( device, "vkCmdBindShadersEXT" ) ); + //=== VK_QCOM_tile_properties === vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) ); vkGetDynamicRenderingTilePropertiesQCOM = @@ -2384,6 +2390,12 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0; PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0; + //=== VK_EXT_shader_object === + PFN_vkCreateShadersEXT vkCreateShadersEXT = 0; + PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0; + PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT = 0; + PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT = 0; + //=== VK_QCOM_tile_properties === PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0; PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0; @@ -2480,6 +2492,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_optical_flow === class OpticalFlowSessionNV; + //=== VK_EXT_shader_object === + class ShaderEXT; + //==================== //=== RAII HANDLES === //==================== @@ -4139,6 +4154,16 @@ namespace VULKAN_HPP_NAMESPACE createOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + //=== VK_EXT_shader_object === + + VULKAN_HPP_NODISCARD std::vector + createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::ShaderEXT + createShaderEXT( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + //=== VK_QCOM_tile_properties === VULKAN_HPP_NODISCARD 
VULKAN_HPP_NAMESPACE::TilePropertiesQCOM @@ -5887,6 +5912,11 @@ namespace VULKAN_HPP_NAMESPACE void opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo ) const VULKAN_HPP_NOEXCEPT; + //=== VK_EXT_shader_object === + + void bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & stages, + VULKAN_HPP_NAMESPACE::ArrayProxy const & shaders ) const; + private: VULKAN_HPP_NAMESPACE::Device m_device = {}; VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {}; @@ -10269,6 +10299,172 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; }; + class ShaderEXT + { + public: + using CType = VkShaderEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + ShaderEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkCreateShadersEXT( static_cast( *device ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_shader ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + throwResultException( result, "vkCreateShadersEXT" ); + } + } + + ShaderEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkShaderEXT shader, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_shader( shader ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + ShaderEXT( std::nullptr_t ) {} + + ~ShaderEXT() + { + clear(); + } + + ShaderEXT() = delete; + ShaderEXT( ShaderEXT const & ) = delete; + + ShaderEXT( ShaderEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_shader( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_shader, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + ShaderEXT & operator=( ShaderEXT const & ) = delete; + + ShaderEXT & operator=( ShaderEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + clear(); + m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); + m_shader = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_shader, {} ); + m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); + m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::ShaderEXT const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_shader; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_shader ) + { + getDispatcher()->vkDestroyShaderEXT( + static_cast( m_device ), static_cast( m_shader ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; 
+ m_shader = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::ShaderEXT release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_shader, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderEXT & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_shader, rhs.m_shader ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_EXT_shader_object === + + VULKAN_HPP_NODISCARD std::vector getBinaryData() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::ShaderEXT m_shader = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class ShaderEXTs : public std::vector + { + public: + ShaderEXTs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher(); + std::vector shaders( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( dispatcher->vkCreateShadersEXT( + static_cast( *device ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + shaders.data() ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + this->reserve( createInfos.size() ); + for ( auto const & shaderEXT : shaders ) + { + this->emplace_back( device, shaderEXT, allocator ); + } + } + else + { + throwResultException( result, "vkCreateShadersEXT" ); + } + } + + ShaderEXTs( std::nullptr_t ) {} + + ShaderEXTs() = delete; + ShaderEXTs( ShaderEXTs const & ) = delete; + ShaderEXTs( ShaderEXTs && rhs ) = default; + ShaderEXTs & operator=( ShaderEXTs const & ) = delete; + ShaderEXTs & operator=( ShaderEXTs && rhs ) = default; + }; + class ShaderModule { public: @@ -13598,14 +13794,16 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullMode && "Function requires or " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullMode && + "Function requires or or " ); getDispatcher()->vkCmdSetCullMode( static_cast( m_commandBuffer ), static_cast( cullMode ) ); } VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFace && "Function requires or " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFace && + "Function requires or or " ); getDispatcher()->vkCmdSetFrontFace( static_cast( m_commandBuffer ), static_cast( frontFace ) ); } @@ -13613,7 +13811,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT { 
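// Example (not part of the patch): a sketch of the RAII path added above, assuming `createInfos`
// describes a vertex and a fragment shader in that order. vk::raii::Device::createShadersEXT returns
// one vk::raii::ShaderEXT per create-info (equivalent to constructing vk::raii::ShaderEXTs), each of
// which destroys its handle on destruction; the raii CommandBuffer gains the matching bindShadersEXT
// overload, and getBinaryData() exposes vkGetShaderBinaryDataEXT.
#include <cstdint>
#include <vector>
#include <vulkan/vulkan_raii.hpp>

void recordWithShaderObjects( vk::raii::Device const &                     device,
                              vk::raii::CommandBuffer const &              cmd,
                              std::vector<vk::ShaderCreateInfoEXT> const & createInfos )
{
  std::vector<vk::raii::ShaderEXT> shaders = device.createShadersEXT( createInfos );

  std::vector<vk::ShaderStageFlagBits> stages = { vk::ShaderStageFlagBits::eVertex,
                                                  vk::ShaderStageFlagBits::eFragment };
  std::vector<vk::ShaderEXT>           handles;
  for ( auto const & shader : shaders )
  {
    handles.push_back( *shader );  // operator*() yields the underlying vk::ShaderEXT
  }
  cmd.bindShadersEXT( stages, handles );

  std::vector<uint8_t> blob = shaders.front().getBinaryData();  // opaque binary for caching
}  // the raii ShaderEXT destructors call vkDestroyShaderEXT here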
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveTopology && - "Function requires or " ); + "Function requires or or " ); getDispatcher()->vkCmdSetPrimitiveTopology( static_cast( m_commandBuffer ), static_cast( primitiveTopology ) ); } @@ -13622,7 +13820,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWithCount && - "Function requires or " ); + "Function requires or or " ); getDispatcher()->vkCmdSetViewportWithCount( static_cast( m_commandBuffer ), viewports.size(), reinterpret_cast( viewports.data() ) ); @@ -13632,7 +13830,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissorWithCount && - "Function requires or " ); + "Function requires or or " ); getDispatcher()->vkCmdSetScissorWithCount( static_cast( m_commandBuffer ), scissors.size(), reinterpret_cast( scissors.data() ) ); @@ -13645,7 +13843,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ArrayProxy const & strides ) const { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers2 && - "Function requires or " ); + "Function requires or or " ); if ( buffers.size() != offsets.size() ) { throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" ); @@ -13671,7 +13869,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthTestEnable && - "Function requires or " ); + "Function requires or or " ); getDispatcher()->vkCmdSetDepthTestEnable( static_cast( m_commandBuffer ), static_cast( depthTestEnable ) ); } @@ -13679,7 +13877,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthWriteEnable && - "Function requires or " ); + "Function requires or or " ); getDispatcher()->vkCmdSetDepthWriteEnable( static_cast( m_commandBuffer ), static_cast( depthWriteEnable ) ); } @@ -13687,7 +13885,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthCompareOp && - "Function requires or " ); + "Function requires or or " ); getDispatcher()->vkCmdSetDepthCompareOp( static_cast( m_commandBuffer ), static_cast( depthCompareOp ) ); } @@ -13695,7 +13893,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBoundsTestEnable && - "Function requires or " ); + "Function requires or or " ); getDispatcher()->vkCmdSetDepthBoundsTestEnable( static_cast( m_commandBuffer ), static_cast( depthBoundsTestEnable ) ); } @@ -13703,7 +13901,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilTestEnable && - "Function requires or " ); + "Function requires or or " ); 
getDispatcher()->vkCmdSetStencilTestEnable( static_cast( m_commandBuffer ), static_cast( stencilTestEnable ) ); } @@ -13714,7 +13912,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOp && "Function requires or " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOp && + "Function requires or or " ); getDispatcher()->vkCmdSetStencilOp( static_cast( m_commandBuffer ), static_cast( faceMask ), @@ -13727,7 +13926,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizerDiscardEnable && - "Function requires or " ); + "Function requires or or " ); getDispatcher()->vkCmdSetRasterizerDiscardEnable( static_cast( m_commandBuffer ), static_cast( rasterizerDiscardEnable ) ); } @@ -13735,7 +13934,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBiasEnable && - "Function requires or " ); + "Function requires or or " ); getDispatcher()->vkCmdSetDepthBiasEnable( static_cast( m_commandBuffer ), static_cast( depthBiasEnable ) ); } @@ -13743,7 +13942,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveRestartEnable && - "Function requires or " ); + "Function requires or or " ); getDispatcher()->vkCmdSetPrimitiveRestartEnable( static_cast( m_commandBuffer ), static_cast( primitiveRestartEnable ) ); } @@ -17779,7 +17978,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullModeEXT && - "Function requires or " ); + "Function requires or or " ); getDispatcher()->vkCmdSetCullModeEXT( static_cast( m_commandBuffer ), static_cast( cullMode ) ); } @@ -17787,7 +17986,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFaceEXT && - "Function requires or " ); + "Function requires or or " ); getDispatcher()->vkCmdSetFrontFaceEXT( static_cast( m_commandBuffer ), static_cast( frontFace ) ); } @@ -17795,7 +17994,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveTopologyEXT && - "Function requires or " ); + "Function requires or or " ); getDispatcher()->vkCmdSetPrimitiveTopologyEXT( static_cast( m_commandBuffer ), static_cast( primitiveTopology ) ); } @@ -17804,7 +18003,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWithCountEXT && - "Function requires or " ); + "Function requires or or " ); getDispatcher()->vkCmdSetViewportWithCountEXT( static_cast( m_commandBuffer ), viewports.size(), reinterpret_cast( viewports.data() ) 
); @@ -17814,7 +18013,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissorWithCountEXT && - "Function requires or " ); + "Function requires or or " ); getDispatcher()->vkCmdSetScissorWithCountEXT( static_cast( m_commandBuffer ), scissors.size(), reinterpret_cast( scissors.data() ) ); @@ -17828,7 +18027,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ArrayProxy const & strides ) const { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers2EXT && - "Function requires or " ); + "Function requires or or " ); if ( buffers.size() != offsets.size() ) { throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" ); @@ -17854,7 +18053,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthTestEnableEXT && - "Function requires or " ); + "Function requires or or " ); getDispatcher()->vkCmdSetDepthTestEnableEXT( static_cast( m_commandBuffer ), static_cast( depthTestEnable ) ); } @@ -17862,7 +18061,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthWriteEnableEXT && - "Function requires or " ); + "Function requires or or " ); getDispatcher()->vkCmdSetDepthWriteEnableEXT( static_cast( m_commandBuffer ), static_cast( depthWriteEnable ) ); } @@ -17870,7 +18069,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthCompareOpEXT && - "Function requires or " ); + "Function requires or or " ); getDispatcher()->vkCmdSetDepthCompareOpEXT( static_cast( m_commandBuffer ), static_cast( depthCompareOp ) ); } @@ -17878,7 +18077,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT && - "Function requires or " ); + "Function requires or or " ); getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT( static_cast( m_commandBuffer ), static_cast( depthBoundsTestEnable ) ); } @@ -17886,7 +18085,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilTestEnableEXT && - "Function requires or " ); + "Function requires or or " ); getDispatcher()->vkCmdSetStencilTestEnableEXT( static_cast( m_commandBuffer ), static_cast( stencilTestEnable ) ); } @@ -17898,7 +18097,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOpEXT && - "Function requires or " ); + "Function requires or or " ); getDispatcher()->vkCmdSetStencilOpEXT( static_cast( m_commandBuffer ), static_cast( faceMask ), @@ -18717,7 +18916,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ArrayProxy const & vertexAttributeDescriptions ) const VULKAN_HPP_NOEXCEPT { - 
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetVertexInputEXT && "Function requires " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetVertexInputEXT && + "Function requires or " ); getDispatcher()->vkCmdSetVertexInputEXT( static_cast( m_commandBuffer ), vertexBindingDescriptions.size(), @@ -18912,15 +19112,16 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPatchControlPointsEXT && - "Function requires " ); + "Function requires or " ); getDispatcher()->vkCmdSetPatchControlPointsEXT( static_cast( m_commandBuffer ), patchControlPoints ); } VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT && - "Function requires or " ); + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT && + "Function requires or or " ); getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT( static_cast( m_commandBuffer ), static_cast( rasterizerDiscardEnable ) ); } @@ -18928,22 +19129,24 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBiasEnableEXT && - "Function requires or " ); + "Function requires or or " ); getDispatcher()->vkCmdSetDepthBiasEnableEXT( static_cast( m_commandBuffer ), static_cast( depthBiasEnable ) ); } VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEXT && "Function requires " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEXT && + "Function requires or " ); getDispatcher()->vkCmdSetLogicOpEXT( static_cast( m_commandBuffer ), static_cast( logicOp ) ); } VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT && - "Function requires or " ); + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT && + "Function requires or or " ); getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT( static_cast( m_commandBuffer ), static_cast( primitiveRestartEnable ) ); } @@ -19431,7 +19634,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetTessellationDomainOriginEXT && - "Function requires " ); + "Function requires or " ); getDispatcher()->vkCmdSetTessellationDomainOriginEXT( static_cast( m_commandBuffer ), static_cast( domainOrigin ) ); @@ -19439,14 +19642,16 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClampEnableEXT && "Function requires " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClampEnableEXT && + "Function requires or " ); getDispatcher()->vkCmdSetDepthClampEnableEXT( static_cast( m_commandBuffer ), static_cast( depthClampEnable ) ); } VULKAN_HPP_INLINE void CommandBuffer::setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode ) const 
VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPolygonModeEXT && "Function requires " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPolygonModeEXT && + "Function requires or " ); getDispatcher()->vkCmdSetPolygonModeEXT( static_cast( m_commandBuffer ), static_cast( polygonMode ) ); } @@ -19454,7 +19659,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizationSamplesEXT && - "Function requires " ); + "Function requires or " ); getDispatcher()->vkCmdSetRasterizationSamplesEXT( static_cast( m_commandBuffer ), static_cast( rasterizationSamples ) ); @@ -19463,7 +19668,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ArrayProxy const & sampleMask ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleMaskEXT && "Function requires " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleMaskEXT && + "Function requires or " ); if ( sampleMask.size() != ( static_cast( samples ) + 31 ) / 32 ) { throw LogicError( VULKAN_HPP_NAMESPACE_STRING @@ -19478,21 +19684,23 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAlphaToCoverageEnableEXT && - "Function requires " ); + "Function requires or " ); getDispatcher()->vkCmdSetAlphaToCoverageEnableEXT( static_cast( m_commandBuffer ), static_cast( alphaToCoverageEnable ) ); } VULKAN_HPP_INLINE void CommandBuffer::setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAlphaToOneEnableEXT && "Function requires " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAlphaToOneEnableEXT && + "Function requires or " ); getDispatcher()->vkCmdSetAlphaToOneEnableEXT( static_cast( m_commandBuffer ), static_cast( alphaToOneEnable ) ); } VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEnableEXT && "Function requires " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEnableEXT && + "Function requires or " ); getDispatcher()->vkCmdSetLogicOpEnableEXT( static_cast( m_commandBuffer ), static_cast( logicOpEnable ) ); } @@ -19500,7 +19708,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendEnables ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendEnableEXT && "Function requires " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendEnableEXT && + "Function requires or " ); getDispatcher()->vkCmdSetColorBlendEnableEXT( static_cast( m_commandBuffer ), firstAttachment, @@ -19513,7 +19722,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendEquations ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendEquationEXT && - "Function requires " ); + "Function requires or " ); getDispatcher()->vkCmdSetColorBlendEquationEXT( static_cast( m_commandBuffer ), firstAttachment, @@ -19525,7 +19734,8 @@ namespace VULKAN_HPP_NAMESPACE 
uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy const & colorWriteMasks ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorWriteMaskEXT && "Function requires " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorWriteMaskEXT && + "Function requires or " ); getDispatcher()->vkCmdSetColorWriteMaskEXT( static_cast( m_commandBuffer ), firstAttachment, @@ -19536,7 +19746,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizationStreamEXT && - "Function requires " ); + "Function requires or " ); getDispatcher()->vkCmdSetRasterizationStreamEXT( static_cast( m_commandBuffer ), rasterizationStream ); } @@ -19545,7 +19755,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetConservativeRasterizationModeEXT && - "Function requires " ); + "Function requires or " ); getDispatcher()->vkCmdSetConservativeRasterizationModeEXT( static_cast( m_commandBuffer ), static_cast( conservativeRasterizationMode ) ); @@ -19554,14 +19764,15 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetExtraPrimitiveOverestimationSizeEXT && - "Function requires " ); + "Function requires or " ); getDispatcher()->vkCmdSetExtraPrimitiveOverestimationSizeEXT( static_cast( m_commandBuffer ), extraPrimitiveOverestimationSize ); } VULKAN_HPP_INLINE void CommandBuffer::setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClipEnableEXT && "Function requires " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClipEnableEXT && + "Function requires or " ); getDispatcher()->vkCmdSetDepthClipEnableEXT( static_cast( m_commandBuffer ), static_cast( depthClipEnable ) ); } @@ -19569,7 +19780,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleLocationsEnableEXT && - "Function requires " ); + "Function requires or " ); getDispatcher()->vkCmdSetSampleLocationsEnableEXT( static_cast( m_commandBuffer ), static_cast( sampleLocationsEnable ) ); } @@ -19579,7 +19790,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendAdvanced ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendAdvancedEXT && - "Function requires " ); + "Function requires or " ); getDispatcher()->vkCmdSetColorBlendAdvancedEXT( static_cast( m_commandBuffer ), firstAttachment, @@ -19591,7 +19802,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetProvokingVertexModeEXT && - "Function requires " ); + "Function requires or " ); getDispatcher()->vkCmdSetProvokingVertexModeEXT( static_cast( m_commandBuffer ), static_cast( provokingVertexMode ) ); @@ -19601,7 +19812,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setLineRasterizationModeEXT( 
VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineRasterizationModeEXT && - "Function requires " ); + "Function requires or " ); getDispatcher()->vkCmdSetLineRasterizationModeEXT( static_cast( m_commandBuffer ), static_cast( lineRasterizationMode ) ); @@ -19609,7 +19820,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleEnableEXT && "Function requires " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleEnableEXT && + "Function requires or " ); getDispatcher()->vkCmdSetLineStippleEnableEXT( static_cast( m_commandBuffer ), static_cast( stippledLineEnable ) ); } @@ -19617,7 +19829,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClipNegativeOneToOneEXT && - "Function requires " ); + "Function requires or " ); getDispatcher()->vkCmdSetDepthClipNegativeOneToOneEXT( static_cast( m_commandBuffer ), static_cast( negativeOneToOne ) ); } @@ -19625,7 +19837,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWScalingEnableNV && - "Function requires " ); + "Function requires or " ); getDispatcher()->vkCmdSetViewportWScalingEnableNV( static_cast( m_commandBuffer ), static_cast( viewportWScalingEnable ) ); } @@ -19634,7 +19846,8 @@ namespace VULKAN_HPP_NAMESPACE uint32_t firstViewport, VULKAN_HPP_NAMESPACE::ArrayProxy const & viewportSwizzles ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportSwizzleNV && "Function requires " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportSwizzleNV && + "Function requires or " ); getDispatcher()->vkCmdSetViewportSwizzleNV( static_cast( m_commandBuffer ), firstViewport, @@ -19645,7 +19858,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageToColorEnableNV && - "Function requires " ); + "Function requires or " ); getDispatcher()->vkCmdSetCoverageToColorEnableNV( static_cast( m_commandBuffer ), static_cast( coverageToColorEnable ) ); } @@ -19653,7 +19866,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageToColorLocationNV && - "Function requires " ); + "Function requires or " ); getDispatcher()->vkCmdSetCoverageToColorLocationNV( static_cast( m_commandBuffer ), coverageToColorLocation ); } @@ -19662,7 +19875,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationModeNV && - "Function requires " ); + "Function requires or " ); getDispatcher()->vkCmdSetCoverageModulationModeNV( static_cast( m_commandBuffer ), static_cast( coverageModulationMode ) ); @@ 
-19672,7 +19885,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationTableEnableNV && - "Function requires " ); + "Function requires or " ); getDispatcher()->vkCmdSetCoverageModulationTableEnableNV( static_cast( m_commandBuffer ), static_cast( coverageModulationTableEnable ) ); @@ -19682,7 +19895,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & coverageModulationTable ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationTableNV && - "Function requires " ); + "Function requires or " ); getDispatcher()->vkCmdSetCoverageModulationTableNV( static_cast( m_commandBuffer ), coverageModulationTable.size(), coverageModulationTable.data() ); @@ -19691,7 +19904,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetShadingRateImageEnableNV && - "Function requires " ); + "Function requires or " ); getDispatcher()->vkCmdSetShadingRateImageEnableNV( static_cast( m_commandBuffer ), static_cast( shadingRateImageEnable ) ); } @@ -19700,7 +19913,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRepresentativeFragmentTestEnableNV && - "Function requires " ); + "Function requires or " ); getDispatcher()->vkCmdSetRepresentativeFragmentTestEnableNV( static_cast( m_commandBuffer ), static_cast( representativeFragmentTestEnable ) ); @@ -19710,7 +19923,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageReductionModeNV && - "Function requires " ); + "Function requires or " ); getDispatcher()->vkCmdSetCoverageReductionModeNV( static_cast( m_commandBuffer ), static_cast( coverageReductionMode ) ); @@ -19812,6 +20025,63 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &executeInfo ) ); } + //=== VK_EXT_shader_object === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::ShaderEXTs( *this, createInfos, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::ShaderEXT + Device::createShaderEXT( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::ShaderEXT( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector ShaderEXT::getBinaryData() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderBinaryDataEXT && "Function requires " ); + + std::vector data; + size_t dataSize; + VkResult result; + do + { + result = getDispatcher()->vkGetShaderBinaryDataEXT( static_cast( m_device ), static_cast( m_shader ), &dataSize, nullptr ); + if ( ( result == VK_SUCCESS ) && dataSize ) + { + data.resize( dataSize ); + result = getDispatcher()->vkGetShaderBinaryDataEXT( + static_cast( 
m_device ), static_cast( m_shader ), &dataSize, reinterpret_cast( data.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::ShaderEXT::getBinaryData" ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + return data; + } + + VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & stages, + VULKAN_HPP_NAMESPACE::ArrayProxy const & shaders ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindShadersEXT && "Function requires " ); + if ( stages.size() != shaders.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindShadersEXT: stages.size() != shaders.size()" ); + } + + getDispatcher()->vkCmdBindShadersEXT( static_cast( m_commandBuffer ), + stages.size(), + reinterpret_cast( stages.data() ), + reinterpret_cast( shaders.data() ) ); + } + //=== VK_QCOM_tile_properties === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Framebuffer::getTilePropertiesQCOM() const diff --git a/vulkan/vulkan_static_assertions.hpp b/vulkan/vulkan_static_assertions.hpp index cf4595a..c3ea36c 100644 --- a/vulkan/vulkan_static_assertions.hpp +++ b/vulkan/vulkan_static_assertions.hpp @@ -5780,6 +5780,22 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceImage2DViewOf3DFeaturesEXT is not nothrow_move_constructible!" ); +//=== VK_EXT_shader_tile_image === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderTileImageFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderTileImageFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT ) == sizeof( VkPhysicalDeviceShaderTileImagePropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderTileImagePropertiesEXT is not nothrow_move_constructible!" ); + //=== VK_EXT_opacity_micromap === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT ) == sizeof( VkMicromapBuildInfoEXT ), "struct and wrapper have different size!" ); @@ -6321,6 +6337,30 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDevicePipelineProtectedAccessFeaturesEXT is not nothrow_move_constructible!" ); +//=== VK_EXT_shader_object === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderEXT ) == sizeof( VkShaderEXT ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "ShaderEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderObjectFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderObjectFeaturesEXT is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT ) == sizeof( VkPhysicalDeviceShaderObjectPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderObjectPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT ) == sizeof( VkShaderCreateInfoEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ShaderCreateInfoEXT is not nothrow_move_constructible!" ); + //=== VK_QCOM_tile_properties === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM ) == sizeof( VkPhysicalDeviceTilePropertiesFeaturesQCOM ), diff --git a/vulkan/vulkan_structs.hpp b/vulkan/vulkan_structs.hpp index e5d8f96..2f395a4 100644 --- a/vulkan/vulkan_structs.hpp +++ b/vulkan/vulkan_structs.hpp @@ -73700,6 +73700,190 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceShaderModuleIdentifierPropertiesEXT; }; + struct PhysicalDeviceShaderObjectFeaturesEXT + { + using NativeType = VkPhysicalDeviceShaderObjectFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderObjectFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderObjectFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderObject_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , shaderObject( shaderObject_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderObjectFeaturesEXT( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderObjectFeaturesEXT( VkPhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderObjectFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderObjectFeaturesEXT & operator=( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderObjectFeaturesEXT & operator=( VkPhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectFeaturesEXT & setShaderObject( VULKAN_HPP_NAMESPACE::Bool32 shaderObject_ ) VULKAN_HPP_NOEXCEPT + { + shaderObject = shaderObject_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceShaderObjectFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderObjectFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderObject ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderObjectFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderObject == rhs.shaderObject ); +# endif + } + + bool operator!=( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderObjectFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderObject = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderObjectFeaturesEXT; + }; + + struct PhysicalDeviceShaderObjectPropertiesEXT + { + using NativeType = VkPhysicalDeviceShaderObjectPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderObjectPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectPropertiesEXT( std::array const & shaderBinaryUUID_ = {}, + uint32_t shaderBinaryVersion_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , shaderBinaryUUID( shaderBinaryUUID_ ) + , shaderBinaryVersion( shaderBinaryVersion_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectPropertiesEXT( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderObjectPropertiesEXT( VkPhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderObjectPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderObjectPropertiesEXT & operator=( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderObjectPropertiesEXT & operator=( VkPhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceShaderObjectPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderObjectPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple const &, uint32_t const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderBinaryUUID, shaderBinaryVersion ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderObjectPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBinaryUUID == rhs.shaderBinaryUUID ) && + ( shaderBinaryVersion == rhs.shaderBinaryVersion ); +# endif + } + + bool operator!=( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceShaderObjectPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D shaderBinaryUUID = {}; + uint32_t shaderBinaryVersion = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderObjectPropertiesEXT; + }; + struct PhysicalDeviceShaderSMBuiltinsFeaturesNV { using NativeType = VkPhysicalDeviceShaderSMBuiltinsFeaturesNV; @@ -74187,6 +74371,226 @@ namespace VULKAN_HPP_NAMESPACE using PhysicalDeviceShaderTerminateInvocationFeaturesKHR = PhysicalDeviceShaderTerminateInvocationFeatures; + struct PhysicalDeviceShaderTileImageFeaturesEXT + { + using NativeType = VkPhysicalDeviceShaderTileImageFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderTileImageFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImageFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageColorReadAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageDepthReadAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageStencilReadAccess_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , shaderTileImageColorReadAccess( shaderTileImageColorReadAccess_ ) + , shaderTileImageDepthReadAccess( shaderTileImageDepthReadAccess_ ) + , shaderTileImageStencilReadAccess( shaderTileImageStencilReadAccess_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImageFeaturesEXT( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderTileImageFeaturesEXT( VkPhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderTileImageFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderTileImageFeaturesEXT & operator=( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderTileImageFeaturesEXT & operator=( VkPhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT & + setShaderTileImageColorReadAccess( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageColorReadAccess_ ) VULKAN_HPP_NOEXCEPT + { + shaderTileImageColorReadAccess = shaderTileImageColorReadAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT & + setShaderTileImageDepthReadAccess( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageDepthReadAccess_ ) VULKAN_HPP_NOEXCEPT + { + shaderTileImageDepthReadAccess = shaderTileImageDepthReadAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT & + setShaderTileImageStencilReadAccess( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageStencilReadAccess_ ) VULKAN_HPP_NOEXCEPT + { + shaderTileImageStencilReadAccess = shaderTileImageStencilReadAccess_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceShaderTileImageFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderTileImageFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderTileImageColorReadAccess, shaderTileImageDepthReadAccess, shaderTileImageStencilReadAccess ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderTileImageFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderTileImageColorReadAccess == rhs.shaderTileImageColorReadAccess ) && + ( shaderTileImageDepthReadAccess == rhs.shaderTileImageDepthReadAccess ) && + ( shaderTileImageStencilReadAccess == rhs.shaderTileImageStencilReadAccess ); +# endif + } + + bool operator!=( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTileImageFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageColorReadAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageDepthReadAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageStencilReadAccess = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderTileImageFeaturesEXT; + }; + + struct PhysicalDeviceShaderTileImagePropertiesEXT + { + using NativeType = VkPhysicalDeviceShaderTileImagePropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderTileImagePropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImagePropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageCoherentReadAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadSampleFromPixelRateInvocation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadFromHelperInvocation_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , shaderTileImageCoherentReadAccelerated( shaderTileImageCoherentReadAccelerated_ ) + , shaderTileImageReadSampleFromPixelRateInvocation( shaderTileImageReadSampleFromPixelRateInvocation_ ) + , shaderTileImageReadFromHelperInvocation( shaderTileImageReadFromHelperInvocation_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImagePropertiesEXT( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderTileImagePropertiesEXT( VkPhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderTileImagePropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderTileImagePropertiesEXT & operator=( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderTileImagePropertiesEXT & operator=( VkPhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceShaderTileImagePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderTileImagePropertiesEXT &() 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, shaderTileImageCoherentReadAccelerated, shaderTileImageReadSampleFromPixelRateInvocation, shaderTileImageReadFromHelperInvocation ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderTileImagePropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderTileImageCoherentReadAccelerated == rhs.shaderTileImageCoherentReadAccelerated ) && + ( shaderTileImageReadSampleFromPixelRateInvocation == rhs.shaderTileImageReadSampleFromPixelRateInvocation ) && + ( shaderTileImageReadFromHelperInvocation == rhs.shaderTileImageReadFromHelperInvocation ); +# endif + } + + bool operator!=( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTileImagePropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageCoherentReadAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadSampleFromPixelRateInvocation = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadFromHelperInvocation = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderTileImagePropertiesEXT; + }; + struct PhysicalDeviceShadingRateImageFeaturesNV { using NativeType = VkPhysicalDeviceShadingRateImageFeaturesNV; @@ -83706,6 +84110,7 @@ namespace VULKAN_HPP_NAMESPACE }; using PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo; + using ShaderRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo; struct PipelineTessellationDomainOriginStateCreateInfo { @@ -94373,6 +94778,321 @@ namespace VULKAN_HPP_NAMESPACE uint32_t data = {}; }; + struct ShaderCreateInfoEXT + { + using NativeType = VkShaderCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShaderCreateInfoEXT( VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, + VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage_ = {}, + VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType_ = VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT::eBinary, + size_t codeSize_ = {}, + const void * pCode_ = {}, + const char * pName_ = {}, + uint32_t setLayoutCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {}, + uint32_t pushConstantRangeCount_ = {}, + const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {}, + const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , stage( stage_ ) + , nextStage( nextStage_ ) + , codeType( codeType_ ) + , codeSize( codeSize_ ) + , pCode( pCode_ ) + , pName( pName_ ) + , 
setLayoutCount( setLayoutCount_ ) + , pSetLayouts( pSetLayouts_ ) + , pushConstantRangeCount( pushConstantRangeCount_ ) + , pPushConstantRanges( pPushConstantRanges_ ) + , pSpecializationInfo( pSpecializationInfo_ ) + { + } + + VULKAN_HPP_CONSTEXPR ShaderCreateInfoEXT( ShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderCreateInfoEXT( VkShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ShaderCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + ShaderCreateInfoEXT( VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_, + VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage_, + VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & code_, + const char * pName_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pushConstantRanges_ = {}, + const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , stage( stage_ ) + , nextStage( nextStage_ ) + , codeType( codeType_ ) + , codeSize( code_.size() * sizeof( T ) ) + , pCode( code_.data() ) + , pName( pName_ ) + , setLayoutCount( static_cast( setLayouts_.size() ) ) + , pSetLayouts( setLayouts_.data() ) + , pushConstantRangeCount( static_cast( pushConstantRanges_.size() ) ) + , pPushConstantRanges( pushConstantRanges_.data() ) + , pSpecializationInfo( pSpecializationInfo_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ShaderCreateInfoEXT & operator=( ShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ShaderCreateInfoEXT & operator=( VkShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT + { + stage = stage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setNextStage( VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage_ ) VULKAN_HPP_NOEXCEPT + { + nextStage = nextStage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setCodeType( VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType_ ) VULKAN_HPP_NOEXCEPT + { + codeType = codeType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT + { + codeSize = codeSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPCode( const void * pCode_ ) VULKAN_HPP_NOEXCEPT + { + pCode = pCode_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + ShaderCreateInfoEXT & setCode( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & code_ ) VULKAN_HPP_NOEXCEPT + { + codeSize = code_.size() * sizeof( T ); + pCode = code_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPName( const char * pName_ ) 
VULKAN_HPP_NOEXCEPT + { + pName = pName_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT + { + setLayoutCount = setLayoutCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT + { + pSetLayouts = pSetLayouts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShaderCreateInfoEXT & + setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_ ) VULKAN_HPP_NOEXCEPT + { + setLayoutCount = static_cast( setLayouts_.size() ); + pSetLayouts = setLayouts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT + { + pushConstantRangeCount = pushConstantRangeCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & + setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT + { + pPushConstantRanges = pPushConstantRanges_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShaderCreateInfoEXT & setPushConstantRanges( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT + { + pushConstantRangeCount = static_cast( pushConstantRanges_.size() ); + pPushConstantRanges = pushConstantRanges_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & + setPSpecializationInfo( const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT + { + pSpecializationInfo = pSpecializationInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkShaderCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShaderCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + stage, + nextStage, + codeType, + codeSize, + pCode, + pName, + setLayoutCount, + pSetLayouts, + pushConstantRangeCount, + pPushConstantRanges, + pSpecializationInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( ShaderCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = stage <=> rhs.stage; cmp != 0 ) + return cmp; + if ( auto cmp = nextStage <=> rhs.nextStage; cmp != 0 ) + return cmp; + if ( auto cmp = codeType <=> rhs.codeType; cmp != 0 ) + return cmp; + if ( auto cmp = codeSize <=> rhs.codeSize; cmp != 0 ) + return cmp; + if ( auto cmp = pCode <=> rhs.pCode; cmp != 0 ) + return cmp; + if ( pName != rhs.pName ) + if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = setLayoutCount <=> rhs.setLayoutCount; cmp != 0 ) + return cmp; + if ( auto cmp = pSetLayouts <=> rhs.pSetLayouts; cmp != 0 ) + return cmp; + if ( auto cmp = pushConstantRangeCount <=> rhs.pushConstantRangeCount; cmp != 0 ) + return cmp; + if ( auto cmp = pPushConstantRanges <=> rhs.pPushConstantRanges; cmp != 0 ) + return cmp; + if ( auto cmp = pSpecializationInfo <=> rhs.pSpecializationInfo; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( ShaderCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && ( nextStage == rhs.nextStage ) && + ( codeType == rhs.codeType ) && ( codeSize == rhs.codeSize ) && ( pCode == rhs.pCode ) && + ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ) && ( setLayoutCount == rhs.setLayoutCount ) && + ( pSetLayouts == rhs.pSetLayouts ) && ( pushConstantRangeCount == rhs.pushConstantRangeCount ) && + ( pPushConstantRanges == rhs.pPushConstantRanges ) && ( pSpecializationInfo == rhs.pSpecializationInfo ); + } + + bool operator!=( ShaderCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex; + VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage = {}; + VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType = VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT::eBinary; + size_t codeSize = {}; + const void * pCode = {}; + const char * pName = {}; + uint32_t setLayoutCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {}; + uint32_t pushConstantRangeCount = {}; + const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges = {}; + const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo = {}; + }; + + template <> + struct CppType + { + using Type = ShaderCreateInfoEXT; + }; + struct ShaderModuleCreateInfo { using NativeType = VkShaderModuleCreateInfo; diff --git a/vulkan/vulkan_to_string.hpp b/vulkan/vulkan_to_string.hpp index 525a47f..59ecb39 100644 --- a/vulkan/vulkan_to_string.hpp +++ b/vulkan/vulkan_to_string.hpp @@ -3206,6 +3206,32 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } + //=== VK_EXT_shader_object === + + VULKAN_HPP_INLINE std::string to_string( ShaderCreateFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ShaderCreateFlagBitsEXT::eLinkStage ) + result += "LinkStage | "; + if ( value & ShaderCreateFlagBitsEXT::eAllowVaryingSubgroupSize ) + result += "AllowVaryingSubgroupSize | "; + if ( value & ShaderCreateFlagBitsEXT::eRequireFullSubgroups ) + result += "RequireFullSubgroups | "; + if ( value & ShaderCreateFlagBitsEXT::eNoTaskShader ) + result += "NoTaskShader | "; + if ( value & ShaderCreateFlagBitsEXT::eDispatchBase ) + result += "DispatchBase | "; + if ( value & ShaderCreateFlagBitsEXT::eFragmentShadingRateAttachment ) + result += "FragmentShadingRateAttachment | "; + if ( value & ShaderCreateFlagBitsEXT::eFragmentDensityMapAttachment ) + result += "FragmentDensityMapAttachment | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + 
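For context, the changes above add the VK_EXT_shader_object plumbing end to end: the RAII entry points (Device::createShadersEXT / createShaderEXT, ShaderEXT::getBinaryData, CommandBuffer::bindShadersEXT), the ShaderCreateInfoEXT wrapper with its setters, the feature/property structs, and the to_string overloads for the new flag and code-type enums. What follows is a minimal usage sketch only, not part of this patch; the device, command buffer, SPIR-V blobs and descriptor set layout are placeholder names assumed to exist in the application, and the device is assumed to have been created with the shaderObject feature enabled.

#include <vulkan/vulkan_raii.hpp>
#include <array>
#include <vector>

// Sketch: create two linked shader objects from SPIR-V and bind them instead of a pipeline.
// 'device', 'commandBuffer', 'vertSpirv', 'fragSpirv' and 'setLayout' are placeholders
// assumed to exist elsewhere in the application.
void bindLinkedShaderObjects( vk::raii::Device const &        device,
                              vk::raii::CommandBuffer const & commandBuffer,
                              std::vector<uint32_t> const &   vertSpirv,
                              std::vector<uint32_t> const &   fragSpirv,
                              vk::DescriptorSetLayout const & setLayout )
{
  std::array<vk::ShaderCreateInfoEXT, 2> createInfos = {
    vk::ShaderCreateInfoEXT{}
      .setFlags( vk::ShaderCreateFlagBitsEXT::eLinkStage )
      .setStage( vk::ShaderStageFlagBits::eVertex )
      .setNextStage( vk::ShaderStageFlagBits::eFragment )
      .setCodeType( vk::ShaderCodeTypeEXT::eSpirv )
      .setCodeSize( vertSpirv.size() * sizeof( uint32_t ) )
      .setPCode( vertSpirv.data() )
      .setPName( "main" )
      .setSetLayoutCount( 1 )
      .setPSetLayouts( &setLayout ),
    vk::ShaderCreateInfoEXT{}
      .setFlags( vk::ShaderCreateFlagBitsEXT::eLinkStage )
      .setStage( vk::ShaderStageFlagBits::eFragment )
      .setCodeType( vk::ShaderCodeTypeEXT::eSpirv )
      .setCodeSize( fragSpirv.size() * sizeof( uint32_t ) )
      .setPCode( fragSpirv.data() )
      .setPName( "main" )
      .setSetLayoutCount( 1 )
      .setPSetLayouts( &setLayout )
  };

  // createShadersEXT throws if creation fails, e.g. on Result::eErrorIncompatibleShaderBinaryEXT.
  auto shaders = device.createShadersEXT( createInfos );

  // Bind the stages and supply the state that would otherwise come from a pipeline; the
  // vkCmdSet* assertion messages updated in this patch reflect that those entry points are
  // now also reachable through VK_EXT_shader_object.
  std::array<vk::ShaderStageFlagBits, 2> stages = { vk::ShaderStageFlagBits::eVertex,
                                                    vk::ShaderStageFlagBits::eFragment };
  commandBuffer.bindShadersEXT( stages, { *shaders[0], *shaders[1] } );
  commandBuffer.setRasterizerDiscardEnable( VK_FALSE );

  // The driver-cached binary can be queried for later reuse with ShaderCodeTypeEXT::eBinary;
  // getBinaryData() loops on VK_INCOMPLETE internally as shown above.
  auto blob = shaders[0].getBinaryData();
  static_cast<void>( blob );
}

The new vk::to_string overloads added just above can be used to log the ShaderCreateFlagsEXT and ShaderCodeTypeEXT values passed in such create infos.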
//======================= //=== ENUMs to_string === //======================= @@ -3277,6 +3303,7 @@ namespace VULKAN_HPP_NAMESPACE case Result::eErrorInvalidVideoStdParametersKHR: return "ErrorInvalidVideoStdParametersKHR"; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ case Result::eErrorCompressionExhaustedEXT: return "ErrorCompressionExhaustedEXT"; + case Result::eErrorIncompatibleShaderBinaryEXT: return "ErrorIncompatibleShaderBinaryEXT"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -4019,6 +4046,8 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceMultiDrawFeaturesEXT: return "PhysicalDeviceMultiDrawFeaturesEXT"; case StructureType::ePhysicalDeviceMultiDrawPropertiesEXT: return "PhysicalDeviceMultiDrawPropertiesEXT"; case StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT: return "PhysicalDeviceImage2DViewOf3DFeaturesEXT"; + case StructureType::ePhysicalDeviceShaderTileImageFeaturesEXT: return "PhysicalDeviceShaderTileImageFeaturesEXT"; + case StructureType::ePhysicalDeviceShaderTileImagePropertiesEXT: return "PhysicalDeviceShaderTileImagePropertiesEXT"; case StructureType::eMicromapBuildInfoEXT: return "MicromapBuildInfoEXT"; case StructureType::eMicromapVersionInfoEXT: return "MicromapVersionInfoEXT"; case StructureType::eCopyMicromapInfoEXT: return "CopyMicromapInfoEXT"; @@ -4081,6 +4110,9 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV: return "OpticalFlowSessionCreatePrivateDataInfoNV"; case StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT: return "PhysicalDeviceLegacyDitheringFeaturesEXT"; case StructureType::ePhysicalDevicePipelineProtectedAccessFeaturesEXT: return "PhysicalDevicePipelineProtectedAccessFeaturesEXT"; + case StructureType::ePhysicalDeviceShaderObjectFeaturesEXT: return "PhysicalDeviceShaderObjectFeaturesEXT"; + case StructureType::ePhysicalDeviceShaderObjectPropertiesEXT: return "PhysicalDeviceShaderObjectPropertiesEXT"; + case StructureType::eShaderCreateInfoEXT: return "ShaderCreateInfoEXT"; case StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM: return "PhysicalDeviceTilePropertiesFeaturesQCOM"; case StructureType::eTilePropertiesQCOM: return "TilePropertiesQCOM"; case StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC: return "PhysicalDeviceAmigoProfilingFeaturesSEC"; @@ -4162,6 +4194,7 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_FUCHSIA*/ case ObjectType::eMicromapEXT: return "MicromapEXT"; case ObjectType::eOpticalFlowSessionNV: return "OpticalFlowSessionNV"; + case ObjectType::eShaderEXT: return "ShaderEXT"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -8313,6 +8346,33 @@ namespace VULKAN_HPP_NAMESPACE } } + //=== VK_EXT_shader_object === + + VULKAN_HPP_INLINE std::string to_string( ShaderCreateFlagBitsEXT value ) + { + switch ( value ) + { + case ShaderCreateFlagBitsEXT::eLinkStage: return "LinkStage"; + case ShaderCreateFlagBitsEXT::eAllowVaryingSubgroupSize: return "AllowVaryingSubgroupSize"; + case ShaderCreateFlagBitsEXT::eRequireFullSubgroups: return "RequireFullSubgroups"; + case ShaderCreateFlagBitsEXT::eNoTaskShader: return "NoTaskShader"; + case ShaderCreateFlagBitsEXT::eDispatchBase: return "DispatchBase"; + case ShaderCreateFlagBitsEXT::eFragmentShadingRateAttachment: return "FragmentShadingRateAttachment"; + case ShaderCreateFlagBitsEXT::eFragmentDensityMapAttachment: return "FragmentDensityMapAttachment"; + default: return 
"invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( ShaderCodeTypeEXT value ) + { + switch ( value ) + { + case ShaderCodeTypeEXT::eBinary: return "Binary"; + case ShaderCodeTypeEXT::eSpirv: return "Spirv"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + //=== VK_NV_ray_tracing_invocation_reorder === VULKAN_HPP_INLINE std::string to_string( RayTracingInvocationReorderModeNV value ) diff --git a/vulkan/vulkansc.hpp b/vulkan/vulkansc.hpp index d6e0d7d..d34b9c8 100644 --- a/vulkan/vulkansc.hpp +++ b/vulkan/vulkansc.hpp @@ -5420,6 +5420,15 @@ namespace VULKAN_HPP_NAMESPACE }; }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> struct StructExtends { diff --git a/vulkan/vulkansc_handles.hpp b/vulkan/vulkansc_handles.hpp index f88b969..7f676bf 100644 --- a/vulkan/vulkansc_handles.hpp +++ b/vulkan/vulkansc_handles.hpp @@ -410,6 +410,7 @@ namespace VULKAN_HPP_NAMESPACE using PhysicalDeviceSubgroupSizeControlPropertiesEXT = PhysicalDeviceSubgroupSizeControlProperties; struct PipelineShaderStageRequiredSubgroupSizeCreateInfo; using PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo; + using ShaderRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo; struct PhysicalDeviceInlineUniformBlockFeatures; using PhysicalDeviceInlineUniformBlockFeaturesEXT = PhysicalDeviceInlineUniformBlockFeatures; struct PhysicalDeviceInlineUniformBlockProperties; diff --git a/vulkan/vulkansc_raii.hpp b/vulkan/vulkansc_raii.hpp index 3c431ef..65de6fe 100644 --- a/vulkan/vulkansc_raii.hpp +++ b/vulkan/vulkansc_raii.hpp @@ -8506,14 +8506,16 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullMode && "Function requires or " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullMode && + "Function requires or or " ); getDispatcher()->vkCmdSetCullMode( static_cast( m_commandBuffer ), static_cast( cullMode ) ); } VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFace && "Function requires or " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFace && + "Function requires or or " ); getDispatcher()->vkCmdSetFrontFace( static_cast( m_commandBuffer ), static_cast( frontFace ) ); } @@ -8521,7 +8523,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveTopology && - "Function requires or " ); + "Function requires or or " ); getDispatcher()->vkCmdSetPrimitiveTopology( static_cast( m_commandBuffer ), static_cast( primitiveTopology ) ); } @@ -8530,7 +8532,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWithCount && - "Function requires or " ); + "Function requires or or " ); getDispatcher()->vkCmdSetViewportWithCount( static_cast( m_commandBuffer ), viewports.size(), reinterpret_cast( viewports.data() ) ); @@ -8540,7 +8542,7 @@ namespace 
VULKAN_HPP_NAMESPACE
       CommandBuffer::setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT
     {
       VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissorWithCount &&
-                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state>" );
+                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object>" );
       getDispatcher()->vkCmdSetScissorWithCount(
         static_cast<VkCommandBuffer>( m_commandBuffer ), scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
@@ -8553,7 +8555,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                            VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides ) const
     {
       VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers2 &&
-                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state>" );
+                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object>" );
       if ( buffers.size() != offsets.size() )
       {
         throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" );
@@ -8579,7 +8581,7 @@ namespace VULKAN_HPP_NAMESPACE
     VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
     {
       VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthTestEnable &&
-                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state>" );
+                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object>" );
       getDispatcher()->vkCmdSetDepthTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthTestEnable ) );
     }
@@ -8587,7 +8589,7 @@ namespace VULKAN_HPP_NAMESPACE
     VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
     {
       VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthWriteEnable &&
-                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state>" );
+                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object>" );
       getDispatcher()->vkCmdSetDepthWriteEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthWriteEnable ) );
     }
@@ -8595,7 +8597,7 @@ namespace VULKAN_HPP_NAMESPACE
     VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
     {
       VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthCompareOp &&
-                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state>" );
+                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object>" );
       getDispatcher()->vkCmdSetDepthCompareOp( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCompareOp>( depthCompareOp ) );
     }
@@ -8603,7 +8605,7 @@ namespace VULKAN_HPP_NAMESPACE
     VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
     {
       VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBoundsTestEnable &&
-                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state>" );
+                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object>" );
       getDispatcher()->vkCmdSetDepthBoundsTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBoundsTestEnable ) );
     }
@@ -8611,7 +8613,7 @@ namespace VULKAN_HPP_NAMESPACE
     VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
     {
       VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilTestEnable &&
-                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state>" );
+                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object>" );
       getDispatcher()->vkCmdSetStencilTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stencilTestEnable ) );
     }
@@ -8622,7 +8624,8 @@ namespace VULKAN_HPP_NAMESPACE
                                                          VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
                                                          VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT
     {
-      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOp && "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state>" );
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOp &&
+                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object>" );
       getDispatcher()->vkCmdSetStencilOp( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                           static_cast<VkStencilFaceFlags>( faceMask ),
@@ -8635,7 +8638,7 @@ namespace VULKAN_HPP_NAMESPACE
     VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
     {
       VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizerDiscardEnable &&
-                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state2>" );
+                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object>" );
       getDispatcher()->vkCmdSetRasterizerDiscardEnable( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                                         static_cast<VkBool32>( rasterizerDiscardEnable ) );
     }
@@ -8643,7 +8646,7 @@ namespace VULKAN_HPP_NAMESPACE
     VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT
     {
       VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBiasEnable &&
-                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state2>" );
+                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object>" );
       getDispatcher()->vkCmdSetDepthBiasEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBiasEnable ) );
     }
@@ -8651,7 +8654,7 @@ namespace VULKAN_HPP_NAMESPACE
     VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT
     {
       VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveRestartEnable &&
-                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state2>" );
+                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object>" );
       getDispatcher()->vkCmdSetPrimitiveRestartEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( primitiveRestartEnable ) );
     }
@@ -9911,7 +9914,7 @@ namespace VULKAN_HPP_NAMESPACE
     VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
     {
       VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullModeEXT &&
-                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state>" );
+                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object>" );
       getDispatcher()->vkCmdSetCullModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCullModeFlags>( cullMode ) );
     }
@@ -9919,7 +9922,7 @@ namespace VULKAN_HPP_NAMESPACE
     VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
     {
       VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFaceEXT &&
-                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state>" );
+                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object>" );
       getDispatcher()->vkCmdSetFrontFaceEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkFrontFace>( frontFace ) );
     }
@@ -9927,7 +9930,7 @@ namespace VULKAN_HPP_NAMESPACE
     VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
     {
       VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveTopologyEXT &&
-                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state>" );
+                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object>" );
       getDispatcher()->vkCmdSetPrimitiveTopologyEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPrimitiveTopology>( primitiveTopology ) );
     }
@@ -9936,7 +9939,7 @@ namespace VULKAN_HPP_NAMESPACE
       VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT
     {
       VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWithCountEXT &&
-                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state>" );
+                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object>" );
       getDispatcher()->vkCmdSetViewportWithCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                                      viewports.size(),
                                                      reinterpret_cast<const VkViewport *>( viewports.data() ) );
@@ -9946,7 +9949,7 @@ namespace VULKAN_HPP_NAMESPACE
       CommandBuffer::setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT
     {
       VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissorWithCountEXT &&
-                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state>" );
+                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object>" );
       getDispatcher()->vkCmdSetScissorWithCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                                     scissors.size(),
                                                     reinterpret_cast<const VkRect2D *>( scissors.data() ) );
@@ -9960,7 +9963,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides ) const
     {
       VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers2EXT &&
-                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state>" );
+                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object>" );
       if ( buffers.size() != offsets.size() )
       {
         throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
@@ -9986,7 +9989,7 @@ namespace VULKAN_HPP_NAMESPACE
     VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
     {
       VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthTestEnableEXT &&
-                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state>" );
+                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object>" );
       getDispatcher()->vkCmdSetDepthTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthTestEnable ) );
     }
@@ -9994,7 +9997,7 @@ namespace VULKAN_HPP_NAMESPACE
     VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
     {
       VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthWriteEnableEXT &&
-                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state>" );
+                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object>" );
       getDispatcher()->vkCmdSetDepthWriteEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthWriteEnable ) );
     }
@@ -10002,7 +10005,7 @@ namespace VULKAN_HPP_NAMESPACE
     VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
     {
       VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthCompareOpEXT &&
-                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state>" );
+                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object>" );
       getDispatcher()->vkCmdSetDepthCompareOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCompareOp>( depthCompareOp ) );
     }
@@ -10010,7 +10013,7 @@ namespace VULKAN_HPP_NAMESPACE
     VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
     {
       VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT &&
-                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state>" );
+                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object>" );
       getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBoundsTestEnable ) );
     }
@@ -10018,7 +10021,7 @@ namespace VULKAN_HPP_NAMESPACE
     VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
     {
       VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilTestEnableEXT &&
-                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state>" );
+                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object>" );
       getDispatcher()->vkCmdSetStencilTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stencilTestEnable ) );
     }
@@ -10030,7 +10033,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                             VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT
     {
       VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOpEXT &&
-                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state>" );
+                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object>" );
       getDispatcher()->vkCmdSetStencilOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                              static_cast<VkStencilFaceFlags>( faceMask ),
@@ -10235,7 +10238,8 @@ namespace VULKAN_HPP_NAMESPACE
       VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions ) const VULKAN_HPP_NOEXCEPT
     {
-      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetVertexInputEXT && "Function requires <VK_EXT_vertex_input_dynamic_state>" );
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetVertexInputEXT &&
+                         "Function requires <VK_EXT_shader_object> or <VK_EXT_vertex_input_dynamic_state>" );
       getDispatcher()->vkCmdSetVertexInputEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                                vertexBindingDescriptions.size(),
@@ -10379,15 +10383,16 @@ namespace VULKAN_HPP_NAMESPACE
     VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT
     {
       VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPatchControlPointsEXT &&
-                         "Function requires <VK_EXT_extended_dynamic_state2>" );
+                         "Function requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object>" );
       getDispatcher()->vkCmdSetPatchControlPointsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), patchControlPoints );
     }
 
     VULKAN_HPP_INLINE void
       CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
     {
-      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT &&
-                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state2>" );
+      VULKAN_HPP_ASSERT(
+        getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT &&
+        "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object>" );
       getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                                            static_cast<VkBool32>( rasterizerDiscardEnable ) );
     }
@@ -10395,22 +10400,24 @@ namespace VULKAN_HPP_NAMESPACE
     VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT
     {
       VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBiasEnableEXT &&
-                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state2>" );
+                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object>" );
       getDispatcher()->vkCmdSetDepthBiasEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBiasEnable ) );
     }
 
     VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp ) const VULKAN_HPP_NOEXCEPT
     {
-      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEXT && "Function requires <VK_EXT_extended_dynamic_state2>" );
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEXT &&
+                         "Function requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object>" );
       getDispatcher()->vkCmdSetLogicOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkLogicOp>( logicOp ) );
     }
 
     VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT
     {
-      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT &&
-                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state2>" );
+      VULKAN_HPP_ASSERT(
+        getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT &&
+        "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object>" );
       getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                                           static_cast<VkBool32>( primitiveRestartEnable ) );
     }
diff --git a/vulkan/vulkansc_structs.hpp b/vulkan/vulkansc_structs.hpp
index 60c2d11..1331769 100644
--- a/vulkan/vulkansc_structs.hpp
+++ b/vulkan/vulkansc_structs.hpp
@@ -45371,6 +45371,7 @@ namespace VULKAN_HPP_NAMESPACE
   };
 
   using PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
+  using ShaderRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
 
   struct PipelineTessellationDomainOriginStateCreateInfo
   {
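Usage sketch (not part of the generated headers): the hunks above relax the dynamic-state assertions so that enabling VK_EXT_shader_object alone satisfies them, and vulkansc_structs.hpp gains the ShaderRequiredSubgroupSizeCreateInfoEXT alias. The snippet below is a minimal, hedged illustration of the shader-object path through the exception-throwing vulkan.hpp bindings at this header version; the recordWithShaderObjects wrapper, the SPIR-V words, and the "main" entry point are placeholders, and descriptor-set layouts, the remaining required dynamic state, draw calls, and cleanup are omitted.

```cpp
// Sketch only: assumes a device created with the shaderObject feature enabled and a
// placeholder SPIR-V module `spirv`; error handling beyond exceptions is omitted.
#include <vulkan/vulkan.hpp>

#include <array>
#include <vector>

void recordWithShaderObjects( vk::Device device, vk::CommandBuffer cmd, std::vector<uint32_t> const & spirv )
{
  // Describe a linked vertex/fragment pair of shader objects built from the same SPIR-V blob.
  std::array<vk::ShaderCreateInfoEXT, 2> createInfos{};
  createInfos[0]
    .setFlags( vk::ShaderCreateFlagBitsEXT::eLinkStage )
    .setStage( vk::ShaderStageFlagBits::eVertex )
    .setNextStage( vk::ShaderStageFlagBits::eFragment )
    .setCodeType( vk::ShaderCodeTypeEXT::eSpirv )
    .setCodeSize( spirv.size() * sizeof( uint32_t ) )
    .setPCode( spirv.data() )
    .setPName( "main" );
  createInfos[1] = createInfos[0];
  createInfos[1].setStage( vk::ShaderStageFlagBits::eFragment ).setNextStage( {} );

  // Throws on failure in the default, exception-enabled configuration
  // (e.g. when a cached shader binary is no longer compatible).
  std::vector<vk::ShaderEXT> shaders = device.createShadersEXT( createInfos );

  // Bind the shader objects and set dynamic state; with VK_EXT_shader_object enabled these
  // setters resolve even without VK_EXT_extended_dynamic_state(2) or a Vulkan 1.3 device,
  // which is what the relaxed assertions above reflect.
  std::array<vk::ShaderStageFlagBits, 2> stages = { vk::ShaderStageFlagBits::eVertex, vk::ShaderStageFlagBits::eFragment };
  cmd.bindShadersEXT( stages, shaders );
  cmd.setRasterizerDiscardEnableEXT( VK_FALSE );
  cmd.setDepthTestEnableEXT( VK_FALSE );
  cmd.setPrimitiveTopologyEXT( vk::PrimitiveTopology::eTriangleList );
  // ... remaining required dynamic state, draw calls, and destroyShaderEXT cleanup omitted.
}
```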