diff --git a/include/vulkan/vulkan.hpp b/include/vulkan/vulkan.hpp
index 68b42af..1154081 100644
--- a/include/vulkan/vulkan.hpp
+++ b/include/vulkan/vulkan.hpp
@@ -89,7 +89,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h
 #endif
 
-static_assert( VK_HEADER_VERSION == 154 , "Wrong VK_HEADER_VERSION!" );
+static_assert( VK_HEADER_VERSION == 155 , "Wrong VK_HEADER_VERSION!" );
 
 // 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default.
 // To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
@@ -8363,6 +8363,7 @@ namespace VULKAN_HPP_NAMESPACE
     ePhysicalDeviceSubgroupSizeControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT,
     ePhysicalDeviceShaderCoreProperties2AMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD,
     ePhysicalDeviceCoherentMemoryFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD,
+    ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT,
     ePhysicalDeviceMemoryBudgetPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT,
     ePhysicalDeviceMemoryPriorityFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT,
     eMemoryPriorityAllocateInfoEXT = VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT,
@@ -8940,6 +8941,7 @@ namespace VULKAN_HPP_NAMESPACE
       case StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT : return "PhysicalDeviceSubgroupSizeControlFeaturesEXT";
       case StructureType::ePhysicalDeviceShaderCoreProperties2AMD : return "PhysicalDeviceShaderCoreProperties2AMD";
       case StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD : return "PhysicalDeviceCoherentMemoryFeaturesAMD";
+      case StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT : return "PhysicalDeviceShaderImageAtomicInt64FeaturesEXT";
       case StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT : return "PhysicalDeviceMemoryBudgetPropertiesEXT";
       case StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT : return "PhysicalDeviceMemoryPriorityFeaturesEXT";
       case StructureType::eMemoryPriorityAllocateInfoEXT : return "MemoryPriorityAllocateInfoEXT";
@@ -34714,981 +34716,796 @@ namespace VULKAN_HPP_NAMESPACE
     }
 #endif
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NODISCARD Result begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type begin( const CommandBufferBeginInfo & beginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginConditionalRenderingEXT( const 
VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template - void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + + template + void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy const &counterBuffers, ArrayProxy const &counterBufferOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) 
const; + template + void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy const & counterBuffers, ArrayProxy const & counterBufferOffsets VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy const &descriptorSets, ArrayProxy const &dynamicOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy const & descriptorSets, ArrayProxy const & dynamicOffsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void 
bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template - void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + + template + void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + + template + void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy const &buffers, ArrayProxy const &offsets, ArrayProxy const &sizes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy const & buffers, ArrayProxy const & offsets, ArrayProxy const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void bindVertexBuffers( uint32_t firstBinding, ArrayProxy const &buffers, ArrayProxy const &offsets, Dispatch const &d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + void bindVertexBuffers( uint32_t firstBinding, ArrayProxy const & buffers, ArrayProxy const & offsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void bindVertexBuffers2EXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize* pStrides, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void bindVertexBuffers2EXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::DeviceSize* pStrides VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void bindVertexBuffers2EXT( uint32_t firstBinding, ArrayProxy const &buffers, ArrayProxy const &offsets, ArrayProxy const &sizes, ArrayProxy const &strides, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + void bindVertexBuffers2EXT( uint32_t firstBinding, ArrayProxy const & buffers, ArrayProxy const & offsets, ArrayProxy const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, ArrayProxy const & strides VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy const ®ions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy const & regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR* pBlitImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const 
VULKAN_HPP_NOEXCEPT; + + template + void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR* pBlitImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void blitImage2KHR( const BlitImageInfo2KHR & blitImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void blitImage2KHR( const BlitImageInfo2KHR & blitImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VK_ENABLE_BETA_EXTENSIONS - template - void buildAccelerationStructureIndirectKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + void buildAccelerationStructureIndirectKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void buildAccelerationStructureIndirectKHR( const AccelerationStructureBuildGeometryInfoKHR & info, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void buildAccelerationStructureIndirectKHR( const AccelerationStructureBuildGeometryInfoKHR & info, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + #ifdef VK_ENABLE_BETA_EXTENSIONS - template - void buildAccelerationStructureKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const * ppOffsetInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + void buildAccelerationStructureKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const * ppOffsetInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void buildAccelerationStructureKHR( ArrayProxy const &infos, ArrayProxy const &pOffsetInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + void buildAccelerationStructureKHR( ArrayProxy const & infos, ArrayProxy const & pOffsetInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, 
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment* pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect* pRects, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment* pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect* pRects, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void clearAttachments( ArrayProxy const &attachments, ArrayProxy const &rects, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void clearAttachments( ArrayProxy const & attachments, ArrayProxy const & rects, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue* pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue* pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy const &ranges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy const & ranges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy const &ranges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy const & ranges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VK_ENABLE_BETA_EXTENSIONS - template - void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const &d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + #ifdef VK_ENABLE_BETA_EXTENSIONS - template - void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy const ®ions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR* pCopyBufferInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR* pCopyBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyBuffer2KHR( const CopyBufferInfo2KHR & copyBufferInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void copyBuffer2KHR( const CopyBufferInfo2KHR & copyBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy const ®ions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR* pCopyBufferToImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR* pCopyBufferToImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyBufferToImage2KHR( const CopyBufferToImageInfo2KHR & copyBufferToImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void copyBufferToImage2KHR( const CopyBufferToImageInfo2KHR & copyBufferToImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy const ®ions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR* pCopyImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const 
VULKAN_HPP_NOEXCEPT; + + template + void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR* pCopyImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyImage2KHR( const CopyImageInfo2KHR & copyImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void copyImage2KHR( const CopyImageInfo2KHR & copyImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy const ®ions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR* pCopyImageToBufferInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR* pCopyImageToBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyImageToBuffer2KHR( const CopyImageToBufferInfo2KHR & copyImageToBufferInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void copyImageToBuffer2KHR( const CopyImageToBufferInfo2KHR & copyImageToBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VK_ENABLE_BETA_EXTENSIONS - template - void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void 
copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + + template + void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void debugMarkerEndEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void debugMarkerEndEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + void debugMarkerEndEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + + template + void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void drawIndexed( uint32_t indexCount, uint32_t instanceCount, 
uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, 
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; 
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template
-    void endConditionalRenderingEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
-#else
-    template
-    void endConditionalRenderingEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    template
+    void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template
-    void endDebugUtilsLabelEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
-#else
-    template
-    void endDebugUtilsLabelEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template
-    void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
-#else
-    template
-    void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    template
+    void drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template
-    void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
-#else
-    template
-    void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template
-    void endRenderPass(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
-#else
-    template
-    void endRenderPass(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    template
+    void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-    template
-    void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+    template
+    void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template
+    void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template
+    void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template
+    void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template
+    void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template
+    void endConditionalRenderingEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template
+    void endDebugUtilsLabelEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template
+    void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template
+    void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template
+    void endRenderPass( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template
+    void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template
-    void endRenderPass2( const SubpassEndInfo & subpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template
+    void endRenderPass2( const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-    template
-    void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+    template
+    void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template
-    void endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template
+    void endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-    template
-    void endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+
+    template
+    void endTransformFeedbackEXT( uint32_t firstCounterBuffer,
uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy const &counterBuffers, ArrayProxy const &counterBufferOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + void endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy const & counterBuffers, ArrayProxy const & counterBufferOffsets VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void executeCommands( ArrayProxy const &commandBuffers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void executeCommands( ArrayProxy const & commandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + 
void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + + template + void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + + template + void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, 
VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy const &memoryBarriers, ArrayProxy const &bufferMemoryBarriers, ArrayProxy const &imageMemoryBarriers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy const & memoryBarriers, ArrayProxy const & bufferMemoryBarriers, ArrayProxy const & imageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, 
VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy const &values, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy const & values, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy const &descriptorWrites, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy const & descriptorWrites, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - 
template - void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve* pRegions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + void resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + + template + void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + + template + void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve* pRegions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy const ®ions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR* pResolveImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR* pResolveImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void resolveImage2KHR( const ResolveImageInfo2KHR & resolveImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void resolveImage2KHR( const ResolveImageInfo2KHR & resolveImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setBlendConstants( const float blendConstants[4], Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void setBlendConstants( const float blendConstants[4], Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const 
VULKAN_HPP_NOEXCEPT; -#else - template - void setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void setBlendConstants( const float blendConstants[4], Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template - void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void setCheckpointNV( const void* pCheckpointMarker, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + + template + void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy const &customSampleOrders, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy const & customSampleOrders, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void setDepthBoundsTestEnableEXT( 
VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    template
+    void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template
-    void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
-#else
-    template
-    void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template
-    void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
-#else
-    template
-    void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    template
+    void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template
-    void setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
-#else
-    template
-    void setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template
-    void setDeviceMask( uint32_t deviceMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
-#else
-    template
-    void setDeviceMask( uint32_t deviceMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    template
+    void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template
-    void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
-#else
-    template
-    void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-    template
-    void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+    template
+    void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template
+    void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template
+    void setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template
+    void setDeviceMask( uint32_t deviceMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template
+    void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template
+    void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template
-    void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy const &discardRectangles, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template
+    void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy const & discardRectangles, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template
-    void setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
-#else
-    template
-    void setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-    template
-    void setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
+    template
+    void setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+
+    template
+    void setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template
-    void setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy const &exclusiveScissors, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template
+    void setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy const & exclusiveScissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template
-    void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
-#else
-    template
-    void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template
-    void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT;
-#else
-    template
-    void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const
&d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setLineWidth( float lineWidth, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void setLineWidth( float lineWidth, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_NODISCARD Result setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + + template + void setLineWidth( float lineWidth, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + + template + VULKAN_HPP_NODISCARD Result setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_NODISCARD Result setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD Result setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_NODISCARD Result setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD Result setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS 
typename ResultValueType::type setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + void setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + + template + void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setScissor( uint32_t firstScissor, ArrayProxy const &scissors, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void setScissor( uint32_t firstScissor, ArrayProxy const & scissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setScissorWithCountEXT( ArrayProxy const &scissors, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void setScissorWithCountEXT( ArrayProxy const & scissors, Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; 
+ template + void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + + template + void setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + + template + void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + + template + void setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setViewport( uint32_t firstViewport, ArrayProxy const &viewports, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void setViewport( uint32_t firstViewport, ArrayProxy const & viewports, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy const &shadingRatePalettes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy const & shadingRatePalettes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setViewportWScalingNV( uint32_t firstViewport, ArrayProxy const &viewportWScalings, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void setViewportWScalingNV( uint32_t firstViewport, ArrayProxy const & viewportWScalings, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void setViewportWithCountEXT( uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void setViewportWithCountEXT( uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setViewportWithCountEXT( ArrayProxy const &viewports, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void setViewportWithCountEXT( ArrayProxy const & viewports, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VK_ENABLE_BETA_EXTENSIONS - template - void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void traceRaysIndirectKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void traceRaysIndirectKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + #ifdef VK_ENABLE_BETA_EXTENSIONS - template - void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void traceRaysKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void traceRaysKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + + template + void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, 
VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void* pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy const & data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy const &data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void waitEvents( ArrayProxy const & events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy const & memoryBarriers, ArrayProxy const & bufferMemoryBarriers, ArrayProxy const & imageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void waitEvents( ArrayProxy const &events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy const &memoryBarriers, ArrayProxy const &bufferMemoryBarriers, ArrayProxy const &imageMemoryBarriers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - void writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + void writeAccelerationStructuresPropertiesKHR( uint32_t 
accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void writeAccelerationStructuresPropertiesKHR( ArrayProxy const &accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void writeAccelerationStructuresPropertiesKHR( ArrayProxy const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void writeAccelerationStructuresPropertiesNV( ArrayProxy const &accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void writeAccelerationStructuresPropertiesNV( ArrayProxy const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + + template + void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, 
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD Result end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #else - template - void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD Result end(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type end(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD Result reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #else - template - typename ResultValueType::type reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + typename ResultValueType::type reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT { return m_commandBuffer; @@ -43534,14 +43351,15 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> std::vector getCheckpointDataNV(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - std::vector getCheckpointDataNV(Allocator const& vectorAllocator, Dispatch const &d ) const; + std::vector getCheckpointDataNV(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + 
template + void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -43551,19 +43369,16 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type bindSparse( ArrayProxy const &bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void endDebugUtilsLabelEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void endDebugUtilsLabelEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + void endDebugUtilsLabelEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + + template + void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -43573,14 +43388,16 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD Result presentKHR( const PresentInfoKHR & presentInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD Result setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD Result setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #else - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template VULKAN_HPP_NODISCARD Result submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo* pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -43588,14 +43405,16 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type 
submit( ArrayProxy const &submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD Result waitIdle(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD Result waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #else - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type waitIdle(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueue() const VULKAN_HPP_NOEXCEPT { return m_queue; @@ -47462,16 +47281,18 @@ namespace VULKAN_HPP_NAMESPACE } #endif + #ifdef VK_USE_PLATFORM_WIN32_KHR #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD Result acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD Result acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #else - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_USE_PLATFORM_WIN32_KHR*/ + template VULKAN_HPP_NODISCARD Result acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -47493,11 +47314,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_NODISCARD Result acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD Result acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR* pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, 
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -47506,12 +47328,12 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #ifndef VULKAN_HPP_NO_SMART_HANDLE template>> VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType,Allocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template>, typename B = Allocator, typename std::enable_if>::value, int>::type = 0> - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType,Allocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType,Allocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -47521,12 +47343,12 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #ifndef VULKAN_HPP_NO_SMART_HANDLE template>> VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType,Allocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template>, typename B = Allocator, typename std::enable_if>::value, int>::type = 0> - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType,Allocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename 
ResultValueType,Allocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -47557,14 +47379,16 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type bindAccelerationStructureMemoryNV( ArrayProxy const &bindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD Result bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD Result bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #else - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template VULKAN_HPP_NODISCARD Result bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -47579,14 +47403,16 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type bindBufferMemory2KHR( ArrayProxy const &bindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD Result bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD Result bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #else - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template VULKAN_HPP_NODISCARD Result bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -47610,14 +47436,16 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD Result compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD Result compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #else - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VK_ENABLE_BETA_EXTENSIONS template VULKAN_HPP_NODISCARD Result copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; @@ -47708,14 +47536,14 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD ResultValue> createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD ResultValue> createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD ResultValue> createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template VULKAN_HPP_NODISCARD ResultValue createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #ifndef VULKAN_HPP_NO_SMART_HANDLE template>> VULKAN_HPP_NODISCARD ResultValue,Allocator>> createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template>, typename B = Allocator, typename std::enable_if>::value, int>::type = 0> - VULKAN_HPP_NODISCARD ResultValue,Allocator>> createComputePipelinesUnique( 
VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD ResultValue,Allocator>> createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template VULKAN_HPP_NODISCARD ResultValue> createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ @@ -47817,14 +47645,14 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD ResultValue> createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD ResultValue> createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD ResultValue> createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template VULKAN_HPP_NODISCARD ResultValue createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #ifndef VULKAN_HPP_NO_SMART_HANDLE template>> VULKAN_HPP_NODISCARD ResultValue,Allocator>> createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template>, typename B = Allocator, typename std::enable_if>::value, int>::type = 0> - VULKAN_HPP_NODISCARD ResultValue,Allocator>> createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD ResultValue,Allocator>> createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template VULKAN_HPP_NODISCARD ResultValue> createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ @@ -47914,14 +47742,14 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD 
ResultValue> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD ResultValue> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD ResultValue> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template VULKAN_HPP_NODISCARD ResultValue createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #ifndef VULKAN_HPP_NO_SMART_HANDLE template>> VULKAN_HPP_NODISCARD ResultValue,Allocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template>, typename B = Allocator, typename std::enable_if>::value, int>::type = 0> - VULKAN_HPP_NODISCARD ResultValue,Allocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD ResultValue,Allocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template VULKAN_HPP_NODISCARD ResultValue> createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ @@ -47934,14 +47762,14 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD ResultValue> createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD ResultValue> createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD ResultValue> createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) 
const; template VULKAN_HPP_NODISCARD ResultValue createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #ifndef VULKAN_HPP_NO_SMART_HANDLE template>> VULKAN_HPP_NODISCARD ResultValue,Allocator>> createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template>, typename B = Allocator, typename std::enable_if>::value, int>::type = 0> - VULKAN_HPP_NODISCARD ResultValue,Allocator>> createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD ResultValue,Allocator>> createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template VULKAN_HPP_NODISCARD ResultValue> createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ @@ -48041,14 +47869,14 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createSharedSwapchainsKHR( ArrayProxy const &createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createSharedSwapchainsKHR( ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createSharedSwapchainsKHR( ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #ifndef VULKAN_HPP_NO_SMART_HANDLE template>> VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy const &createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template>, typename B = Allocator, typename std::enable_if>::value, int>::type = 0> - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d ) const; + 
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy const &createInfos, Optional allocator, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ @@ -48076,18 +47904,20 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_NODISCARD Result debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD Result debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_NODISCARD Result debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD Result debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS @@ -48100,417 +47930,473 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + #ifdef VK_ENABLE_BETA_EXTENSIONS - template - void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void 
destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VK_ENABLE_BETA_EXTENSIONS - template - void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional 
allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + #ifdef VK_ENABLE_BETA_EXTENSIONS - template - void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( 
VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const 
& d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroyEvent( VULKAN_HPP_NAMESPACE::Event event VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroyEvent( VULKAN_HPP_NAMESPACE::Event event VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::Event event VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::Event event, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::Event event VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, 
Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::Fence fence, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyImage( 
VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroyImage( VULKAN_HPP_NAMESPACE::Image image VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyImage( VULKAN_HPP_NAMESPACE::Image image, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroyImage( VULKAN_HPP_NAMESPACE::Image image VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::Image image VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::Image image, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::Image image VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView 
VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void 
destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator 
VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & 
d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 
- template - void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; + template + void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) 
const VULKAN_HPP_NOEXCEPT; + template + void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore 
VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch 
const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD Result waitIdle(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD Result waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #else - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type waitIdle(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - Result displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + + template + VULKAN_HPP_NODISCARD Result displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + typename ResultValueType::type displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -48520,18 +48406,20 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type flushMappedMemoryRanges( ArrayProxy const &memoryRanges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy const &commandBuffers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy const & commandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch 
const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy const &commandBuffers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy const & commandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -48548,18 +48436,20 @@ namespace VULKAN_HPP_NAMESPACE typename ResultValueType::type free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy const &descriptorSets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS @@ -48573,17 +48463,16 @@ namespace VULKAN_HPP_NAMESPACE template - VULKAN_HPP_NODISCARD Result getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, size_t dataSize, void* 
pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, ArrayProxy const &data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, size_t dataSize, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, size_t dataSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - #ifdef VK_ENABLE_BETA_EXTENSIONS template void getAccelerationStructureMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; @@ -48720,12 +48609,13 @@ namespace VULKAN_HPP_NAMESPACE StructureChain getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VK_ENABLE_BETA_EXTENSIONS - template - Result getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionKHR* version, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD Result getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionKHR* version, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionKHR & version, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + typename ResultValueType::type getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionKHR & version, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ @@ -48887,7 +48777,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> std::vector getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) 
const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - std::vector getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Allocator const& vectorAllocator, Dispatch const &d ) const; + std::vector getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -48896,7 +48786,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> std::vector getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - std::vector getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const; + std::vector getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -48905,7 +48795,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> std::vector getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - std::vector getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const; + std::vector getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -48992,7 +48882,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -49008,7 +48898,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, 
typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -49017,7 +48907,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -49026,7 +48916,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -49035,7 +48925,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -49047,59 +48937,55 @@ namespace VULKAN_HPP_NAMESPACE template - VULKAN_HPP_NODISCARD Result getQueryPoolResults( 
VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template VULKAN_HPP_NODISCARD Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy const &data, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD ResultValue> getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template - VULKAN_HPP_NODISCARD ResultValue getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD ResultValue> getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - #ifdef VK_ENABLE_BETA_EXTENSIONS template - VULKAN_HPP_NODISCARD Result getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy const &data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , typename Dispatch = 
VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - #ifdef VK_ENABLE_BETA_EXTENSIONS template - VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy const &data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif 
/*VK_ENABLE_BETA_EXTENSIONS*/ - template - VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy const &data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType>::type getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template VULKAN_HPP_NODISCARD Result getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -49150,7 +49036,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD 
infoType, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -49166,7 +49052,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -49183,46 +49069,51 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_NODISCARD Result importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD Result importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VK_USE_PLATFORM_WIN32_KHR - template - VULKAN_HPP_NODISCARD Result importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD Result importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template - VULKAN_HPP_NODISCARD Result importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD Result importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VK_USE_PLATFORM_WIN32_KHR - template - VULKAN_HPP_NODISCARD Result importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD Result importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template - VULKAN_HPP_NODISCARD Result initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD Result initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) 
const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -49275,56 +49166,62 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VK_USE_PLATFORM_WIN32_KHR #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD Result releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD Result releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #else - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD Result releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void releaseProfilingLockKHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD Result releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #else - template - void releaseProfilingLockKHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - typename ResultValueType::type resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + + template 
+ void releaseProfilingLockKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD Result resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #else - template - typename ResultValueType::type resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + typename ResultValueType::type resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + Result resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #else - template - typename ResultValueType::type resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + typename ResultValueType::type resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template Result resetFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -49332,134 +49229,104 @@ namespace VULKAN_HPP_NAMESPACE typename ResultValueType::type resetFences( ArrayProxy const &fences, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t 
queryCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template - VULKAN_HPP_NODISCARD Result setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + + template + VULKAN_HPP_NODISCARD Result setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_NODISCARD Result setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD Result setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD Result setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD Result setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #else - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type setEvent( 
VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT* pMetadata, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + + template + void setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT* pMetadata, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setHdrMetadataEXT( ArrayProxy const &swapchains, ArrayProxy const &metadata, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + void setHdrMetadataEXT( ArrayProxy const & swapchains, ArrayProxy const & metadata, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD Result setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #else - template - void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + typename ResultValueType::type setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - typename ResultValueType::type setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_NODISCARD Result signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD Result signalSemaphore( const 
VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_NODISCARD Result signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD Result signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void uninitializePerformanceApiINTEL(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void uninitializePerformanceApiINTEL(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, 
VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void uninitializePerformanceApiINTEL( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; -#else - template - void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet* pDescriptorCopies, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + + template + void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + + template + void updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet* pDescriptorCopies, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void updateDescriptorSets( ArrayProxy const &descriptorWrites, 
ArrayProxy const &descriptorCopies, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void updateDescriptorSets( ArrayProxy const & descriptorWrites, ArrayProxy const & descriptorCopies, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -53911,7 +53778,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateDeviceExtensionProperties( Optional layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateDeviceExtensionProperties( Optional layerName, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateDeviceExtensionProperties( Optional layerName, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -53920,24 +53787,20 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateDeviceLayerProperties(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateDeviceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateDeviceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_NODISCARD Result enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t* pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR* pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR* pCounterDescriptions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD Result enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t* pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR* pCounters VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR* pCounterDescriptions VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy const &counters, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B = Allocator, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type 
enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy const &counters, Allocator const& vectorAllocator, Dispatch const &d ) const; - template , typename PerformanceCounterDescriptionKHRAllocator = std::allocator, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy const &counters, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , typename PerformanceCounterDescriptionKHRAllocator = std::allocator, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType, std::vector>>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , typename PerformanceCounterDescriptionKHRAllocator = std::allocator, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = PerformanceCounterKHRAllocator, typename B2 = PerformanceCounterDescriptionKHRAllocator, typename std::enable_if < std::is_same::value && std::is_same::value, int>::type = 0> + template , typename PerformanceCounterDescriptionKHRAllocator = std::allocator, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = PerformanceCounterKHRAllocator, typename B2 = PerformanceCounterDescriptionKHRAllocator, typename std::enable_if < std::is_same::value && std::is_same::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType, std::vector>>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, PerformanceCounterKHRAllocator & countersAllocator, PerformanceCounterDescriptionKHRAllocator & counterDescriptionsAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -53947,7 +53810,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -53956,7 +53819,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayModePropertiesKHR( 
VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -53979,7 +53842,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -53988,7 +53851,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type getCalibrateableTimeDomainsEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type getCalibrateableTimeDomainsEXT(Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type getCalibrateableTimeDomainsEXT(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -53997,7 +53860,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type getCooperativeMatrixPropertiesNV(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type getCooperativeMatrixPropertiesNV(Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type getCooperativeMatrixPropertiesNV(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_USE_PLATFORM_DIRECTFB_EXT @@ -54015,7 +53878,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlaneProperties2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlaneProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlaneProperties2KHR(Allocator const& 
vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -54024,7 +53887,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlanePropertiesKHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlanePropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlanePropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -54033,7 +53896,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayProperties2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -54042,7 +53905,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPropertiesKHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -54207,7 +54070,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -54248,7 +54111,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> std::vector 
getQueueFamilyProperties(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0>
- std::vector getQueueFamilyProperties(Allocator const& vectorAllocator, Dispatch const &d ) const;
+ std::vector getQueueFamilyProperties(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template
@@ -54257,11 +54120,11 @@ namespace VULKAN_HPP_NAMESPACE
template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
std::vector getQueueFamilyProperties2(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0>
- std::vector getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const;
+ std::vector getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
std::vector getQueueFamilyProperties2(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0>
- std::vector getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const;
+ std::vector getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template
@@ -54270,11 +54133,11 @@ namespace VULKAN_HPP_NAMESPACE
template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
std::vector getQueueFamilyProperties2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0>
- std::vector getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
+ std::vector getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
std::vector getQueueFamilyProperties2KHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0>
- std::vector getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
+ std::vector getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template
@@ -54283,7 +54146,7 @@ namespace VULKAN_HPP_NAMESPACE
template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
std::vector getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type
= 0> - std::vector getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Allocator const& vectorAllocator, Dispatch const &d ) const; + std::vector getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -54292,7 +54155,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> std::vector getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - std::vector getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; + std::vector getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -54301,7 +54164,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> std::vector getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - std::vector getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; + std::vector getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -54310,7 +54173,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSupportedFramebufferMixedSamplesCombinationsNV(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type getSupportedFramebufferMixedSamplesCombinationsNV(Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type getSupportedFramebufferMixedSamplesCombinationsNV(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -54342,7 +54205,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & 
surfaceInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -54351,7 +54214,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_USE_PLATFORM_WIN32_KHR @@ -54361,7 +54224,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_USE_PLATFORM_WIN32_KHR*/ @@ -54371,7 +54234,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -54387,7 +54250,7 @@ namespace VULKAN_HPP_NAMESPACE 
template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type getToolPropertiesEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type getToolPropertiesEXT(Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type getToolPropertiesEXT(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_USE_PLATFORM_WAYLAND_KHR @@ -54436,14 +54299,16 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - Result releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + template + Result releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #else - template - typename ResultValueType::type releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + typename ResultValueType::type releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPhysicalDevice() const VULKAN_HPP_NOEXCEPT { return m_physicalDevice; @@ -69110,6 +68975,101 @@ namespace VULKAN_HPP_NAMESPACE using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; + struct PhysicalDeviceShaderImageAtomicInt64FeaturesEXT + { + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ = {}) VULKAN_HPP_NOEXCEPT + : shaderImageInt64Atomics( shaderImageInt64Atomics_ ), sparseImageInt64Atomics( sparseImageInt64Atomics_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } +#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + + PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast( this ), &rhs, sizeof( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT ) ); + return *this; + } + + PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setPNext( 
void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setShaderImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderImageInt64Atomics = shaderImageInt64Atomics_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setSparseImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseImageInt64Atomics = sparseImageInt64Atomics_;
+      return *this;
+    }
+
+
+    operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT*>( this );
+    }
+
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderImageInt64Atomics == rhs.shaderImageInt64Atomics )
+          && ( sparseImageInt64Atomics == rhs.sparseImageInt64Atomics );
+    }
+
+    bool operator!=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics = {};
+
+  };
+  static_assert( sizeof( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT ) == sizeof( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderImageAtomicInt64FeaturesEXT>::value, "struct wrapper is not a standard layout!"
); + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + }; + struct PhysicalDeviceShaderImageFootprintFeaturesNV { static const bool allowDuplicate = false; @@ -79870,60 +79830,68 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_USE_PLATFORM_XLIB_KHR*/ - template - void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -79932,7 +79900,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumeratePhysicalDeviceGroups(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type enumeratePhysicalDeviceGroups(Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type enumeratePhysicalDeviceGroups(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -79941,7 +79909,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumeratePhysicalDeviceGroupsKHR(Dispatch const &d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type enumeratePhysicalDeviceGroupsKHR(Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type enumeratePhysicalDeviceGroupsKHR(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -79950,7 +79918,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumeratePhysicalDevices(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type enumeratePhysicalDevices(Allocator const& vectorAllocator, Dispatch const &d ) const; + VULKAN_HPP_NODISCARD typename ResultValueType>::type enumeratePhysicalDevices(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -79960,11 +79928,12 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT; + + template + void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_TYPESAFE_EXPLICIT operator VkInstance() const VULKAN_HPP_NOEXCEPT @@ -80035,7 +80004,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); template, typename Dispatch = 
VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName, Allocator const& vectorAllocator, Dispatch const &d ); + VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -80044,7 +80013,7 @@ namespace VULKAN_HPP_NAMESPACE template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateInstanceLayerProperties(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); template, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateInstanceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ); + VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateInstanceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -80199,1876 +80168,1540 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( pBeginInfo ) ) ); + return static_cast( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( pBeginInfo ) ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo, Dispatch const & d ) const { - Result result = static_cast( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( &beginInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::begin" ); + Result result = static_cast( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( &beginInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast( pConditionalRenderingBegin ) ); + d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast( pConditionalRenderingBegin ) ); } + #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT*>( &conditionalRenderingBegin ) );
+    d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT*>( pLabelInfo ) );
+    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT*>( &labelInfo ) );
+    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
   }
-#else
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query,
VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + + + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ), index ); + d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast( pRenderPassBegin ), static_cast( contents ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast( &renderPassBegin ), static_cast( contents ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast( pRenderPassBegin ), static_cast( contents ) ); + d.vkCmdBeginRenderPass2( m_commandBuffer, reinterpret_cast( pRenderPassBegin ), reinterpret_cast( pSubpassBeginInfo ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast( &renderPassBegin ), static_cast( contents ) ); + d.vkCmdBeginRenderPass2( m_commandBuffer, reinterpret_cast( &renderPassBegin ), reinterpret_cast( &subpassBeginInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBeginRenderPass2( m_commandBuffer, reinterpret_cast( pRenderPassBegin ), reinterpret_cast( pSubpassBeginInfo ) ); + d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast( pRenderPassBegin ), reinterpret_cast( pSubpassBeginInfo ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const 
SubpassBeginInfo & subpassBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBeginRenderPass2( m_commandBuffer, reinterpret_cast( &renderPassBegin ), reinterpret_cast( &subpassBeginInfo ) ); + d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast( &renderPassBegin ), reinterpret_cast( &subpassBeginInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast( pRenderPassBegin ), reinterpret_cast( pSubpassBeginInfo ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast( &renderPassBegin ), reinterpret_cast( &subpassBeginInfo ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast( pCounterBuffers ), reinterpret_cast( pCounterBufferOffsets ) ); + d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast( pCounterBuffers ), reinterpret_cast( pCounterBufferOffsets ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy const &counterBuffers, ArrayProxy const &counterBufferOffsets, Dispatch const &d ) const + template + VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy const & counterBuffers, ArrayProxy const & counterBufferOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { #ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( counterBuffers.size() == counterBufferOffsets.size() ); + VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); #else - if ( counterBuffers.size() != counterBufferOffsets.size() ) + if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); } #endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size() , reinterpret_cast( counterBuffers.data() ), reinterpret_cast( counterBufferOffsets.data() ) ); + + 
d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size(), reinterpret_cast( counterBuffers.data() ), reinterpret_cast( counterBufferOffsets.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), firstSet, descriptorSetCount, reinterpret_cast( pDescriptorSets ), dynamicOffsetCount, pDynamicOffsets ); + d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), firstSet, descriptorSetCount, reinterpret_cast( pDescriptorSets ), dynamicOffsetCount, pDynamicOffsets ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy const &descriptorSets, ArrayProxy const &dynamicOffsets, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy const & descriptorSets, ArrayProxy const & dynamicOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), firstSet, descriptorSets.size() , reinterpret_cast( descriptorSets.data() ), dynamicOffsets.size() , dynamicOffsets.data() ); + d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), firstSet, descriptorSets.size(), reinterpret_cast( descriptorSets.data() ), dynamicOffsets.size(), dynamicOffsets.data() ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( indexType ) ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast( buffer ), 
static_cast( offset ), static_cast( indexType ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdBindPipeline( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ) ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBindPipeline( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ), groupIndex ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ), groupIndex ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast( imageView ), static_cast( imageLayout ) ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + + + template + VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast( imageView ), static_cast( imageLayout ) ); + d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast( pBuffers ), reinterpret_cast( pOffsets ), reinterpret_cast( pSizes ) ); + } + +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy const & buffers, ArrayProxy const & offsets, ArrayProxy const & sizes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); + VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); +#else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" ); + } + if ( !sizes.empty() && buffers.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, buffers.size(), reinterpret_cast( buffers.data() ), reinterpret_cast( offsets.data() ), reinterpret_cast( sizes.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast( pBuffers ), reinterpret_cast( pOffsets ), reinterpret_cast( pSizes ) ); + d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast( pBuffers ), reinterpret_cast( pOffsets ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy const &buffers, ArrayProxy const &offsets, ArrayProxy const &sizes, Dispatch const &d ) const + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, ArrayProxy const & buffers, ArrayProxy const & offsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { #ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); #else if ( buffers.size() != offsets.size() ) { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" ); + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" ); } #endif /*VULKAN_HPP_NO_EXCEPTIONS*/ -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( buffers.size() == sizes.size() ); -#else - if ( buffers.size() != sizes.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( offsets.size() == sizes.size() ); -#else - if ( offsets.size() != sizes.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindTransformFeedbackBuffersEXT: offsets.size() != sizes.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - 
d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast( buffers.data() ), reinterpret_cast( offsets.data() ), reinterpret_cast( sizes.data() ) ); + + d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, buffers.size(), reinterpret_cast( buffers.data() ), reinterpret_cast( offsets.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize* pStrides, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast( pBuffers ), reinterpret_cast( pOffsets ) ); + d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast( pBuffers ), reinterpret_cast( pOffsets ), reinterpret_cast( pSizes ), reinterpret_cast( pStrides ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, ArrayProxy const &buffers, ArrayProxy const &offsets, Dispatch const &d ) const + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, ArrayProxy const & buffers, ArrayProxy const & offsets, ArrayProxy const & sizes, ArrayProxy const & strides, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { #ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); + VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); + VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() ); #else if ( buffers.size() != offsets.size() ) { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" ); + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" ); + } + if ( !sizes.empty() && buffers.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" ); + } + if ( !strides.empty() && buffers.size() != strides.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" ); } #endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast( buffers.data() ), reinterpret_cast( offsets.data() ) ); + + d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, firstBinding, buffers.size(), reinterpret_cast( buffers.data() ), reinterpret_cast( offsets.data() ), reinterpret_cast( sizes.data() ), reinterpret_cast( strides.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize* pStrides, Dispatch const &d) const 
VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast( pBuffers ), reinterpret_cast( pOffsets ), reinterpret_cast( pSizes ), reinterpret_cast( pStrides ) ); + d.vkCmdBlitImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regionCount, reinterpret_cast( pRegions ), static_cast( filter ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, ArrayProxy const &buffers, ArrayProxy const &offsets, ArrayProxy const &sizes, ArrayProxy const &strides, Dispatch const &d ) const + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy const & regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); -#else - if ( buffers.size() != offsets.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( buffers.size() == sizes.size() ); -#else - if ( buffers.size() != sizes.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( buffers.size() == strides.size() ); -#else - if ( buffers.size() != strides.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( offsets.size() == sizes.size() ); -#else - if ( offsets.size() != sizes.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers2EXT: offsets.size() != sizes.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( offsets.size() == strides.size() ); -#else - if ( offsets.size() != strides.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers2EXT: offsets.size() != strides.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( sizes.size() == strides.size() ); -#else - if ( sizes.size() != strides.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers2EXT: sizes.size() != strides.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast( buffers.data() ), reinterpret_cast( offsets.data() ), reinterpret_cast( sizes.data() ), reinterpret_cast( strides.data() ) ); + d.vkCmdBlitImage( 
m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size(), reinterpret_cast( regions.data() ), static_cast( filter ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR* pBlitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBlitImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regionCount, reinterpret_cast( pRegions ), static_cast( filter ) ); + d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast( pBlitImageInfo ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy const ®ions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const BlitImageInfo2KHR & blitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBlitImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ), static_cast( filter ) ); + d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast( &blitImageInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR* pBlitImageInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast( pBlitImageInfo ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const BlitImageInfo2KHR & blitImageInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast( &blitImageInfo ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureIndirectKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureIndirectKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBuildAccelerationStructureIndirectKHR( m_commandBuffer, reinterpret_cast( pInfo ), static_cast( indirectBuffer ), static_cast( indirectOffset ), indirectStride ); + d.vkCmdBuildAccelerationStructureIndirectKHR( m_commandBuffer, 
reinterpret_cast( pInfo ), static_cast( indirectBuffer ), static_cast( indirectOffset ), indirectStride ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureIndirectKHR( const AccelerationStructureBuildGeometryInfoKHR & info, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureIndirectKHR( const AccelerationStructureBuildGeometryInfoKHR & info, VULKAN_HPP_NAMESPACE::Buffer indirectBuffer, VULKAN_HPP_NAMESPACE::DeviceSize indirectOffset, uint32_t indirectStride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBuildAccelerationStructureIndirectKHR( m_commandBuffer, reinterpret_cast( &info ), static_cast( indirectBuffer ), static_cast( indirectOffset ), indirectStride ); + d.vkCmdBuildAccelerationStructureIndirectKHR( m_commandBuffer, reinterpret_cast( &info ), static_cast( indirectBuffer ), static_cast( indirectOffset ), indirectStride ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + #ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const * ppOffsetInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildOffsetInfoKHR* const * ppOffsetInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBuildAccelerationStructureKHR( m_commandBuffer, infoCount, reinterpret_cast( pInfos ), reinterpret_cast( ppOffsetInfos ) ); + d.vkCmdBuildAccelerationStructureKHR( m_commandBuffer, infoCount, reinterpret_cast( pInfos ), reinterpret_cast( ppOffsetInfos ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureKHR( ArrayProxy const &infos, ArrayProxy const &pOffsetInfos, Dispatch const &d ) const + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureKHR( ArrayProxy const & infos, ArrayProxy const & pOffsetInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { #ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( infos.size() == pOffsetInfos.size() ); #else if ( infos.size() != pOffsetInfos.size() ) { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::buildAccelerationStructureKHR: infos.size() != pOffsetInfos.size()" ); + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructureKHR: infos.size() != pOffsetInfos.size()" ); } #endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - d.vkCmdBuildAccelerationStructureKHR( m_commandBuffer, infos.size() , reinterpret_cast( infos.data() ), reinterpret_cast( pOffsetInfos.data() ) ); + + d.vkCmdBuildAccelerationStructureKHR( m_commandBuffer, infos.size(), reinterpret_cast( infos.data() ), reinterpret_cast( pOffsetInfos.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, 
VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast( pInfo ), static_cast( instanceData ), static_cast( instanceOffset ), static_cast( update ), static_cast( dst ), static_cast( src ), static_cast( scratch ), static_cast( scratchOffset ) ); + d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast( pInfo ), static_cast( instanceData ), static_cast( instanceOffset ), static_cast( update ), static_cast( dst ), static_cast( src ), static_cast( scratch ), static_cast( scratchOffset ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast( &info ), static_cast( instanceData ), static_cast( instanceOffset ), static_cast( update ), static_cast( dst ), static_cast( src ), static_cast( scratch ), static_cast( scratchOffset ) ); + d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast( &info ), static_cast( instanceData ), static_cast( instanceOffset ), static_cast( update ), static_cast( dst ), static_cast( src ), static_cast( scratch ), static_cast( scratchOffset ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment* pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect* pRects, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment* pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect* pRects, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - 
d.vkCmdClearAttachments( m_commandBuffer, attachmentCount, reinterpret_cast( pAttachments ), rectCount, reinterpret_cast( pRects ) ); + d.vkCmdClearAttachments( m_commandBuffer, attachmentCount, reinterpret_cast( pAttachments ), rectCount, reinterpret_cast( pRects ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( ArrayProxy const &attachments, ArrayProxy const &rects, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( ArrayProxy const & attachments, ArrayProxy const & rects, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdClearAttachments( m_commandBuffer, attachments.size() , reinterpret_cast( attachments.data() ), rects.size() , reinterpret_cast( rects.data() ) ); + d.vkCmdClearAttachments( m_commandBuffer, attachments.size(), reinterpret_cast( attachments.data() ), rects.size(), reinterpret_cast( rects.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue* pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue* pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdClearColorImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( pColor ), rangeCount, reinterpret_cast( pRanges ) ); + d.vkCmdClearColorImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( pColor ), rangeCount, reinterpret_cast( pRanges ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy const &ranges, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy const & ranges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdClearColorImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( &color ), ranges.size() , reinterpret_cast( ranges.data() ) ); + d.vkCmdClearColorImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( &color ), ranges.size(), reinterpret_cast( ranges.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, 
const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( pDepthStencil ), rangeCount, reinterpret_cast( pRanges ) ); + d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( pDepthStencil ), rangeCount, reinterpret_cast( pRanges ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy const &ranges, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy const & ranges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( &depthStencil ), ranges.size() , reinterpret_cast( ranges.data() ) ); + d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( &depthStencil ), ranges.size(), reinterpret_cast( ranges.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( pInfo ) ); + d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( pInfo ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( &info ) ); + d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( &info ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast( dst ), static_cast( src ), static_cast( mode ) ); } -#else - template - VULKAN_HPP_INLINE void 
CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast<VkAccelerationStructureKHR>( dst ), static_cast<VkAccelerationStructureKHR>( src ), static_cast<VkCopyAccelerationStructureModeKHR>( mode ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
 #ifdef VK_ENABLE_BETA_EXTENSIONS
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR*>( pInfo ) );
+    d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR*>( &info ) );
+    d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 #endif /*VK_ENABLE_BETA_EXTENSIONS*/
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferCopy*>( pRegions ) );
+    d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferCopy *>( pRegions ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const &regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferCopy*>( regions.data() ) );
+    d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regions.size(), reinterpret_cast<const VkBufferCopy *>( regions.data() ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR* pCopyBufferInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR* pCopyBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2KHR*>( pCopyBufferInfo ) );
+    d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2KHR *>( pCopyBufferInfo ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const CopyBufferInfo2KHR & copyBufferInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const CopyBufferInfo2KHR & copyBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2KHR*>( &copyBufferInfo ) );
+    d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2KHR *>( &copyBufferInfo ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
+    d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const &regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
+    d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR* pCopyBufferToImageInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR* pCopyBufferToImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2KHR*>( pCopyBufferToImageInfo ) );
+    d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2KHR *>( pCopyBufferToImageInfo ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const CopyBufferToImageInfo2KHR & copyBufferToImageInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const CopyBufferToImageInfo2KHR & copyBufferToImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2KHR*>( &copyBufferToImageInfo ) );
+    d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2KHR *>( &copyBufferToImageInfo ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageCopy*>( pRegions ) );
+    d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageCopy *>( pRegions ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const &regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageCopy*>( regions.data() ) );
+    d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkImageCopy *>( regions.data() ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR* pCopyImageInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR* pCopyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2KHR*>( pCopyImageInfo ) );
+    d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2KHR *>( pCopyImageInfo ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const CopyImageInfo2KHR & copyImageInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const CopyImageInfo2KHR & copyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2KHR*>( &copyImageInfo ) );
+    d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2KHR *>( &copyImageInfo ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
+    d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const &regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
+    d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regions.size(), reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR* pCopyImageToBufferInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR* pCopyImageToBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2KHR*>( pCopyImageToBufferInfo ) );
+    d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2KHR *>( pCopyImageToBufferInfo ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const CopyImageToBufferInfo2KHR & copyImageToBufferInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const CopyImageToBufferInfo2KHR & copyImageToBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2KHR*>( &copyImageToBufferInfo ) );
+    d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2KHR *>( &copyImageToBufferInfo ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
 #ifdef
VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( pInfo ) ); + d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( pInfo ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( &info ) ); + d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( &info ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount, static_cast( dstBuffer ), static_cast( dstOffset ), static_cast( stride ), static_cast( flags ) ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount, static_cast( dstBuffer ), static_cast( dstOffset ), static_cast( stride ), static_cast( flags ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ); + d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - 
VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); + d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT(Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdDebugMarkerEndEXT( m_commandBuffer ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDebugMarkerEndEXT( m_commandBuffer ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ); + d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); + d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, 
baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdDispatchIndirect( m_commandBuffer, static_cast( buffer ), static_cast( offset ) ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDispatchIndirect( m_commandBuffer, static_cast( buffer ), static_cast( offset ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d) const 
VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdDrawIndexedIndirectCount( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDrawIndexedIndirectCount( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, 
uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdDrawIndirect( m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDrawIndirect( 
m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, instanceCount, firstInstance, static_cast( counterBuffer ), static_cast( counterBufferOffset ), counterOffset, vertexStride ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, instanceCount, firstInstance, static_cast( counterBuffer ), static_cast( counterBufferOffset ), counterOffset, vertexStride ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdDrawIndirectCount( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDrawIndirectCount( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, 
VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdDrawIndirectCountKHR( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDrawIndirectCountKHR( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t 
stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), static_cast( countBufferOffset ), maxDrawCount, stride ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT(Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdEndConditionalRenderingEXT( m_commandBuffer ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdEndConditionalRenderingEXT( m_commandBuffer ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT(Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d ) 
const VULKAN_HPP_NOEXCEPT { d.vkCmdEndQuery( m_commandBuffer, static_cast( queryPool ), query ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdEndQuery( m_commandBuffer, static_cast( queryPool ), query ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast( queryPool ), query, index ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast( queryPool ), query, index ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::endRenderPass(Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdEndRenderPass( m_commandBuffer ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::endRenderPass(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + + + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdEndRenderPass( m_commandBuffer ); + d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast( pSubpassEndInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast( &subpassEndInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast( pSubpassEndInfo ) ); + d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast( pSubpassEndInfo ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const SubpassEndInfo & subpassEndInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast( &subpassEndInfo ) ); + d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast( &subpassEndInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d) 
const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
-  }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast<const VkBuffer *>( pCounterBuffers ), reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
+    d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast<const VkBuffer *>( pCounterBuffers ), reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const &counterBufferOffsets, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
 #ifdef VULKAN_HPP_NO_EXCEPTIONS
-    VULKAN_HPP_ASSERT( counterBuffers.size() == counterBufferOffsets.size() );
+    VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
 #else
-    if ( counterBuffers.size() != counterBufferOffsets.size() )
+    if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
     {
-      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
     }
 #endif /*VULKAN_HPP_NO_EXCEPTIONS*/
-    d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size() , reinterpret_cast<const VkBuffer *>( counterBuffers.data() ), reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
+
+    d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size(), reinterpret_cast<const VkBuffer *>( counterBuffers.data() ), reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
+    d.vkCmdExecuteCommands(
m_commandBuffer, commandBufferCount, reinterpret_cast( pCommandBuffers ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::executeCommands( ArrayProxy const &commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::executeCommands( ArrayProxy const & commandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size() , reinterpret_cast( commandBuffers.data() ) ); + d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size(), reinterpret_cast( commandBuffers.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer, static_cast( isPreprocessed ), reinterpret_cast( pGeneratedCommandsInfo ) ); + d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer, static_cast( isPreprocessed ), reinterpret_cast( pGeneratedCommandsInfo ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer, static_cast( isPreprocessed ), reinterpret_cast( &generatedCommandsInfo ) ); + d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer, static_cast( isPreprocessed ), reinterpret_cast( &generatedCommandsInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdFillBuffer( m_commandBuffer, static_cast( dstBuffer ), static_cast( dstOffset ), static_cast( size ), data ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdFillBuffer( m_commandBuffer, static_cast( dstBuffer ), static_cast( dstOffset ), static_cast( size ), data ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE 
void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( pLabelInfo ) ); + d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( pLabelInfo ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( &labelInfo ) ); + d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( &labelInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdNextSubpass( m_commandBuffer, static_cast( contents ) ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdNextSubpass( m_commandBuffer, static_cast( contents ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdNextSubpass2( m_commandBuffer, reinterpret_cast( pSubpassBeginInfo ), reinterpret_cast( pSubpassEndInfo ) ); + d.vkCmdNextSubpass2( m_commandBuffer, reinterpret_cast( pSubpassBeginInfo ), reinterpret_cast( pSubpassEndInfo ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdNextSubpass2( m_commandBuffer, reinterpret_cast( &subpassBeginInfo ), reinterpret_cast( &subpassEndInfo ) ); + d.vkCmdNextSubpass2( m_commandBuffer, reinterpret_cast( &subpassBeginInfo ), reinterpret_cast( &subpassEndInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - 
d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast( pSubpassBeginInfo ), reinterpret_cast( pSubpassEndInfo ) ); + d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast( pSubpassBeginInfo ), reinterpret_cast( pSubpassEndInfo ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast( &subpassBeginInfo ), reinterpret_cast( &subpassEndInfo ) ); + d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast( &subpassBeginInfo ), reinterpret_cast( &subpassEndInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdPipelineBarrier( m_commandBuffer, static_cast( srcStageMask ), static_cast( dstStageMask ), static_cast( dependencyFlags ), memoryBarrierCount, reinterpret_cast( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast( pImageMemoryBarriers ) ); + d.vkCmdPipelineBarrier( m_commandBuffer, static_cast( srcStageMask ), static_cast( dstStageMask ), static_cast( dependencyFlags ), memoryBarrierCount, reinterpret_cast( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast( pImageMemoryBarriers ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy const &memoryBarriers, ArrayProxy const &bufferMemoryBarriers, ArrayProxy const &imageMemoryBarriers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy const & memoryBarriers, ArrayProxy const & bufferMemoryBarriers, ArrayProxy const & 
imageMemoryBarriers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdPipelineBarrier( m_commandBuffer, static_cast( srcStageMask ), static_cast( dstStageMask ), static_cast( dependencyFlags ), memoryBarriers.size() , reinterpret_cast( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast( imageMemoryBarriers.data() ) ); + d.vkCmdPipelineBarrier( m_commandBuffer, static_cast( srcStageMask ), static_cast( dstStageMask ), static_cast( dependencyFlags ), memoryBarriers.size(), reinterpret_cast( memoryBarriers.data() ), bufferMemoryBarriers.size(), reinterpret_cast( bufferMemoryBarriers.data() ), imageMemoryBarriers.size(), reinterpret_cast( imageMemoryBarriers.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast( pGeneratedCommandsInfo ) ); + d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast( pGeneratedCommandsInfo ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast( &generatedCommandsInfo ) ); + d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast( &generatedCommandsInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdPushConstants( m_commandBuffer, static_cast( layout ), static_cast( stageFlags ), offset, size, pValues ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy const &values, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy const & values, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdPushConstants( m_commandBuffer, static_cast( layout ), static_cast( stageFlags ), offset, values.size() * sizeof( T ) , reinterpret_cast( values.data() ) ); + d.vkCmdPushConstants( m_commandBuffer, static_cast( layout ), static_cast( 
stageFlags ), offset, values.size() * sizeof( T ), reinterpret_cast( values.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), set, descriptorWriteCount, reinterpret_cast( pDescriptorWrites ) ); + d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), set, descriptorWriteCount, reinterpret_cast( pDescriptorWrites ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy const &descriptorWrites, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy const & descriptorWrites, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), set, descriptorWrites.size() , reinterpret_cast( descriptorWrites.data() ) ); + d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), set, descriptorWrites.size(), reinterpret_cast( descriptorWrites.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast( descriptorUpdateTemplate ), static_cast( layout ), set, pData ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast( descriptorUpdateTemplate ), static_cast( layout ), set, pData ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, 
VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdResetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdResetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdResetQueryPool( m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdResetQueryPool( m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve* pRegions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdResolveImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regionCount, reinterpret_cast( pRegions ) ); + d.vkCmdResolveImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regionCount, reinterpret_cast( pRegions ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy const ®ions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdResolveImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( 
dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ) ); + d.vkCmdResolveImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size(), reinterpret_cast( regions.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR* pResolveImageInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR* pResolveImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast( pResolveImageInfo ) ); + d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast( pResolveImageInfo ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const ResolveImageInfo2KHR & resolveImageInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const ResolveImageInfo2KHR & resolveImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast( &resolveImageInfo ) ); + d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast( &resolveImageInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void* pCheckpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast( sampleOrderType ), customSampleOrderCount, reinterpret_cast( pCustomSampleOrders ) ); + 
d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast( sampleOrderType ), customSampleOrderCount, reinterpret_cast( pCustomSampleOrders ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy const &customSampleOrders, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy const & customSampleOrders, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast( sampleOrderType ), customSampleOrders.size() , reinterpret_cast( customSampleOrders.data() ) ); + d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast( sampleOrderType ), customSampleOrders.size(), reinterpret_cast( customSampleOrders.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast( cullMode ) ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast( cullMode ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const &d) const 
VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast( depthBoundsTestEnable ) ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast( depthBoundsTestEnable ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast( depthCompareOp ) ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast( depthCompareOp ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast( depthTestEnable ) ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast( depthTestEnable ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast( depthWriteEnable ) ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast( depthWriteEnable ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask ); - } -#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast( pDiscardRectangles ) ); + d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast( pDiscardRectangles ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy const &discardRectangles, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy const & discardRectangles, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangles.size() , reinterpret_cast( discardRectangles.data() ) ); + d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangles.size(), reinterpret_cast( discardRectangles.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdSetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - 
d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast( pExclusiveScissors ) ); + d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast( pExclusiveScissors ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy const &exclusiveScissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy const & exclusiveScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size() , reinterpret_cast( exclusiveScissors.data() ) ); + d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size(), reinterpret_cast( exclusiveScissors.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast( frontFace ) ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast( frontFace ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetLineWidth( m_commandBuffer, lineWidth ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdSetLineWidth( m_commandBuffer, lineWidth ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const & d ) 
const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( pMarkerInfo ) ) );
+    return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( pMarkerInfo ) ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d ) const
   {
-    Result result = static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceMarkerINTEL" );
+    Result result = static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-  template<typename Dispatch>
-  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( pOverrideInfo ) ) );
+    return static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( pOverrideInfo ) ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const & d ) const
   {
-    Result result = static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceOverrideINTEL" );
+    Result result = static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-  template<typename Dispatch>
-  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( pMarkerInfo ) ) );
+    return static_cast<Result>(
d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type CommandBuffer::setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type CommandBuffer::setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const & d ) const { - Result result = static_cast( d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast( &markerInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceStreamMarkerINTEL" ); + Result result = static_cast( d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast( &markerInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast( primitiveTopology ) ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast( primitiveTopology ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast( pSampleLocationsInfo ) ); + d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast( pSampleLocationsInfo ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast( &sampleLocationsInfo ) ); + d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast( &sampleLocationsInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT { - d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast( pScissors ) ); + d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast( pScissors ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, ArrayProxy const &scissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, ArrayProxy const & scissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size() , reinterpret_cast( scissors.data() ) ); + d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size(), reinterpret_cast( scissors.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast( pScissors ) ); + d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast( pScissors ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( ArrayProxy const &scissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( ArrayProxy const & scissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissors.size() , reinterpret_cast( scissors.data() ) ); + d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissors.size(), reinterpret_cast( scissors.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast( faceMask ), compareMask ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast( faceMask ), compareMask ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetStencilOpEXT( m_commandBuffer, static_cast( faceMask ), static_cast( failOp ), static_cast( passOp ), static_cast( depthFailOp ), static_cast( compareOp ) ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdSetStencilOpEXT( m_commandBuffer, static_cast( faceMask ), static_cast( failOp ), static_cast( passOp ), static_cast( depthFailOp ), static_cast( compareOp ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetStencilReference( m_commandBuffer, static_cast( faceMask ), reference ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdSetStencilReference( m_commandBuffer, static_cast( faceMask ), reference ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast( stencilTestEnable ) ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast( stencilTestEnable ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast( faceMask ), writeMask ); } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + + + template + VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast( faceMask ), writeMask ); + d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pViewports ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 
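  // Usage sketch (editorial note, not part of the generated header): the enhanced-mode
  // overload defined just below takes an ArrayProxy, so viewports can be passed as an
  // initializer list and the defaulted dispatcher argument can be omitted. The command
  // buffer and extent names here are illustrative assumptions, assuming the default
  // `vk` namespace alias for VULKAN_HPP_NAMESPACE:
  //
  //   void recordViewportAndScissor( vk::CommandBuffer cb, vk::Extent2D extent )
  //   {
  //     cb.setViewport( 0, { vk::Viewport( 0.0f, 0.0f,
  //                                        static_cast<float>( extent.width ),
  //                                        static_cast<float>( extent.height ),
  //                                        0.0f, 1.0f ) } );
  //     cb.setScissor( 0, { vk::Rect2D( vk::Offset2D( 0, 0 ), extent ) } );
  //   }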
+ template + VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, ArrayProxy const & viewports, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast( viewports.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pViewports ) ); + d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pShadingRatePalettes ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, ArrayProxy const &viewports, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy const & shadingRatePalettes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size() , reinterpret_cast( viewports.data() ) ); + d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, shadingRatePalettes.size(), reinterpret_cast( shadingRatePalettes.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pShadingRatePalettes ) ); + d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pViewportWScalings ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy const &shadingRatePalettes, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, ArrayProxy const & viewportWScalings, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, shadingRatePalettes.size() , reinterpret_cast( shadingRatePalettes.data() ) ); + d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportWScalings.size(), reinterpret_cast( viewportWScalings.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void 
CommandBuffer::setViewportWithCountEXT( uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pViewportWScalings ) ); + d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewportCount, reinterpret_cast( pViewports ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, ArrayProxy const &viewportWScalings, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( ArrayProxy const & viewports, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportWScalings.size() , reinterpret_cast( viewportWScalings.data() ) ); + d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewports.size(), reinterpret_cast( viewports.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewportCount, reinterpret_cast( pViewports ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( ArrayProxy const &viewports, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewports.size() , reinterpret_cast( viewports.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, reinterpret_cast( pRaygenShaderBindingTable ), reinterpret_cast( pMissShaderBindingTable ), reinterpret_cast( pHitShaderBindingTable ), reinterpret_cast( pCallableShaderBindingTable ), static_cast( buffer ), static_cast( offset ) ); + d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, reinterpret_cast( pRaygenShaderBindingTable ), reinterpret_cast( pMissShaderBindingTable ), reinterpret_cast( pHitShaderBindingTable ), reinterpret_cast( pCallableShaderBindingTable ), static_cast( buffer ), static_cast( offset ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & 
missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, reinterpret_cast( &raygenShaderBindingTable ), reinterpret_cast( &missShaderBindingTable ), reinterpret_cast( &hitShaderBindingTable ), reinterpret_cast( &callableShaderBindingTable ), static_cast( buffer ), static_cast( offset ) ); + d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, reinterpret_cast( &raygenShaderBindingTable ), reinterpret_cast( &missShaderBindingTable ), reinterpret_cast( &hitShaderBindingTable ), reinterpret_cast( &callableShaderBindingTable ), static_cast( buffer ), static_cast( offset ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + #ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedBufferRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdTraceRaysKHR( m_commandBuffer, reinterpret_cast( pRaygenShaderBindingTable ), reinterpret_cast( pMissShaderBindingTable ), reinterpret_cast( pHitShaderBindingTable ), reinterpret_cast( pCallableShaderBindingTable ), width, height, depth ); + d.vkCmdTraceRaysKHR( m_commandBuffer, reinterpret_cast( pRaygenShaderBindingTable ), reinterpret_cast( pMissShaderBindingTable ), reinterpret_cast( pHitShaderBindingTable ), reinterpret_cast( pCallableShaderBindingTable ), width, height, depth ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const StridedBufferRegionKHR & raygenShaderBindingTable, const StridedBufferRegionKHR & missShaderBindingTable, const StridedBufferRegionKHR & hitShaderBindingTable, const StridedBufferRegionKHR & callableShaderBindingTable, uint32_t width, 
uint32_t height, uint32_t depth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdTraceRaysKHR( m_commandBuffer, reinterpret_cast( &raygenShaderBindingTable ), reinterpret_cast( &missShaderBindingTable ), reinterpret_cast( &hitShaderBindingTable ), reinterpret_cast( &callableShaderBindingTable ), width, height, depth ); + d.vkCmdTraceRaysKHR( m_commandBuffer, reinterpret_cast( &raygenShaderBindingTable ), reinterpret_cast( &missShaderBindingTable ), reinterpret_cast( &hitShaderBindingTable ), reinterpret_cast( &callableShaderBindingTable ), width, height, depth ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdTraceRaysNV( m_commandBuffer, static_cast( raygenShaderBindingTableBuffer ), static_cast( raygenShaderBindingOffset ), static_cast( missShaderBindingTableBuffer ), static_cast( missShaderBindingOffset ), static_cast( missShaderBindingStride ), static_cast( hitShaderBindingTableBuffer ), static_cast( hitShaderBindingOffset ), static_cast( hitShaderBindingStride ), static_cast( callableShaderBindingTableBuffer ), static_cast( callableShaderBindingOffset ), static_cast( callableShaderBindingStride ), width, height, depth ); - } -#else - template - VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkCmdTraceRaysNV( m_commandBuffer, static_cast( raygenShaderBindingTableBuffer ), static_cast( raygenShaderBindingOffset ), static_cast( missShaderBindingTableBuffer ), static_cast( missShaderBindingOffset ), static_cast( missShaderBindingStride ), static_cast( hitShaderBindingTableBuffer ), static_cast( hitShaderBindingOffset ), static_cast( hitShaderBindingStride ), static_cast( callableShaderBindingTableBuffer ), static_cast( callableShaderBindingOffset ), static_cast( callableShaderBindingStride ), width, height, depth ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( 
VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdTraceRaysNV( m_commandBuffer, static_cast( raygenShaderBindingTableBuffer ), static_cast( raygenShaderBindingOffset ), static_cast( missShaderBindingTableBuffer ), static_cast( missShaderBindingOffset ), static_cast( missShaderBindingStride ), static_cast( hitShaderBindingTableBuffer ), static_cast( hitShaderBindingOffset ), static_cast( hitShaderBindingStride ), static_cast( callableShaderBindingTableBuffer ), static_cast( callableShaderBindingOffset ), static_cast( callableShaderBindingStride ), width, height, depth ); + } + + + template + VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void* pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkCmdUpdateBuffer( m_commandBuffer, static_cast( dstBuffer ), static_cast( dstOffset ), static_cast( dataSize ), pData ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy const &data, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy const & data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdUpdateBuffer( m_commandBuffer, static_cast( dstBuffer ), static_cast( dstOffset ), data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ); + d.vkCmdUpdateBuffer( m_commandBuffer, static_cast( dstBuffer ), static_cast( dstOffset ), data.size() * sizeof( T ), reinterpret_cast( data.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, 
VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdWaitEvents( m_commandBuffer, eventCount, reinterpret_cast( pEvents ), static_cast( srcStageMask ), static_cast( dstStageMask ), memoryBarrierCount, reinterpret_cast( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast( pImageMemoryBarriers ) ); + d.vkCmdWaitEvents( m_commandBuffer, eventCount, reinterpret_cast( pEvents ), static_cast( srcStageMask ), static_cast( dstStageMask ), memoryBarrierCount, reinterpret_cast( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast( pImageMemoryBarriers ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::waitEvents( ArrayProxy const &events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy const &memoryBarriers, ArrayProxy const &bufferMemoryBarriers, ArrayProxy const &imageMemoryBarriers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::waitEvents( ArrayProxy const & events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy const & memoryBarriers, ArrayProxy const & bufferMemoryBarriers, ArrayProxy const & imageMemoryBarriers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdWaitEvents( m_commandBuffer, events.size() , reinterpret_cast( events.data() ), static_cast( srcStageMask ), static_cast( dstStageMask ), memoryBarriers.size() , reinterpret_cast( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast( imageMemoryBarriers.data() ) ); + d.vkCmdWaitEvents( m_commandBuffer, events.size(), reinterpret_cast( events.data() ), static_cast( srcStageMask ), static_cast( dstStageMask ), memoryBarriers.size(), reinterpret_cast( memoryBarriers.data() ), bufferMemoryBarriers.size(), reinterpret_cast( bufferMemoryBarriers.data() ), imageMemoryBarriers.size(), reinterpret_cast( imageMemoryBarriers.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, accelerationStructureCount, reinterpret_cast( pAccelerationStructures ), static_cast( queryType ), 
static_cast( queryPool ), firstQuery ); + d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, accelerationStructureCount, reinterpret_cast( pAccelerationStructures ), static_cast( queryType ), static_cast( queryPool ), firstQuery ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( ArrayProxy const &accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( ArrayProxy const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, accelerationStructures.size() , reinterpret_cast( accelerationStructures.data() ), static_cast( queryType ), static_cast( queryPool ), firstQuery ); + d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, accelerationStructures.size(), reinterpret_cast( accelerationStructures.data() ), static_cast( queryType ), static_cast( queryPool ), firstQuery ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructureCount, reinterpret_cast( pAccelerationStructures ), static_cast( queryType ), static_cast( queryPool ), firstQuery ); + d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructureCount, reinterpret_cast( pAccelerationStructures ), static_cast( queryType ), static_cast( queryPool ), firstQuery ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( ArrayProxy const &accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( ArrayProxy const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructures.size() , reinterpret_cast( accelerationStructures.data() ), static_cast( queryType ), static_cast( queryPool ), firstQuery ); + d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructures.size(), reinterpret_cast( accelerationStructures.data() ), static_cast( queryType ), static_cast( 
queryPool ), firstQuery );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
   }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
   }
-#else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
-  {
-    d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
-  }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
   }
 #else
-  template<typename Dispatch>
-  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::end(Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::end( Dispatch const & d ) const
   {
     Result result = static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::end" );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
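For orientation, a minimal usage sketch of the two members around this point (CommandBuffer::end and CommandBuffer::reset), assuming vulkan.hpp with the default dispatcher and the default enhanced mode (exceptions enabled); the handles device, commandPool and queryPool are hypothetical and would be created elsewhere. In enhanced mode these calls return void and report failure through createResultValue, i.e. by throwing a vk::SystemError-derived exception; with VULKAN_HPP_DISABLE_ENHANCED_MODE defined they return a vk::Result the caller must check.

#include <vulkan/vulkan.hpp>

// Sketch only: record a single timestamp, then reset the command buffer.
// 'device', 'commandPool' and 'queryPool' are assumed, pre-created handles.
void recordTimestamp( vk::Device device, vk::CommandPool commandPool, vk::QueryPool queryPool )
{
  // Allocate one primary command buffer from the pool.
  vk::CommandBufferAllocateInfo allocInfo( commandPool, vk::CommandBufferLevel::ePrimary, 1 );
  vk::CommandBuffer cmd = device.allocateCommandBuffers( allocInfo ).front();

  cmd.begin( vk::CommandBufferBeginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit ) );
  cmd.writeTimestamp( vk::PipelineStageFlagBits::eBottomOfPipe, queryPool, 0 );
  cmd.end();  // enhanced mode: returns void, throws on failure
  // With VULKAN_HPP_DISABLE_ENHANCED_MODE, end() returns vk::Result instead:
  //   vk::Result result = cmd.end();
  //   if ( result != vk::Result::eSuccess ) { /* handle error */ }

  cmd.reset( vk::CommandBufferResetFlags() );  // pool must have been created with eResetCommandBuffer
  device.freeCommandBuffers( commandPool, cmd );
}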
 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     return static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
   }
 #else
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const
   {
     Result result = static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::reset" );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
 #ifdef VK_USE_PLATFORM_WIN32_KHR
 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     return static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
   }
 #else
-  template<typename Dispatch>
-  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
   {
     Result result = static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireFullScreenExclusiveModeEXT" );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
   template <typename Dispatch>
   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
   {
@@ -82114,17 +81747,19 @@ namespace VULKAN_HPP_NAMESPACE
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-  template<typename Dispatch>
-  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR* pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR*>( pInfo ) ) );
+    return static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( pInfo ) ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
Device::acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const & d ) const { - Result result = static_cast( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast( &info ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireProfilingLockKHR" ); + Result result = static_cast( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast( &info ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -82304,21 +81939,23 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkBindBufferMemory( m_device, static_cast( buffer ), static_cast( memory ), static_cast( memoryOffset ) ) ); } #else - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const { Result result = static_cast( d.vkBindBufferMemory( m_device, static_cast( buffer ), static_cast( memory ), static_cast( memoryOffset ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory" ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -82347,21 +81984,23 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkBindImageMemory( m_device, static_cast( image ), static_cast( memory ), static_cast( memoryOffset ) ) ); } #else - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize 
memoryOffset, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const { Result result = static_cast( d.vkBindImageMemory( m_device, static_cast( image ), static_cast( memory ), static_cast( memoryOffset ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory" ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -82414,21 +82053,23 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkCompileDeferredNV( m_device, static_cast( pipeline ), shader ) ); } #else - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const { Result result = static_cast( d.vkCompileDeferredNV( m_device, static_cast( pipeline ), shader ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::compileDeferredNV" ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::compileDeferredNV" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VK_ENABLE_BETA_EXTENSIONS template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT @@ -83643,31 +83284,35 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast( pNameInfo ) ) ); + return static_cast( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast( pNameInfo ) ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::debugMarkerSetObjectNameEXT( const 
DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d ) const { - Result result = static_cast( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast( &nameInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::debugMarkerSetObjectNameEXT" ); + Result result = static_cast( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast( &nameInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast( pTagInfo ) ) ); + return static_cast( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast( pTagInfo ) ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d ) const { - Result result = static_cast( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::debugMarkerSetObjectTagEXT" ); + Result result = static_cast( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -83688,776 +83333,889 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + #ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyAccelerationStructureKHR( m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); + d.vkDestroyAccelerationStructureKHR( m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void 
Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyAccelerationStructureKHR( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyAccelerationStructureKHR( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); + d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { #ifdef VK_ENABLE_BETA_EXTENSIONS - d.vkDestroyAccelerationStructureKHR( m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); + d.vkDestroyAccelerationStructureKHR( m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); #else - d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); + d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { #ifdef VK_ENABLE_BETA_EXTENSIONS - 
d.vkDestroyAccelerationStructureKHR( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyAccelerationStructureKHR( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); #else - d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyAccelerationStructureNV( m_device, static_cast( accelerationStructure ), reinterpret_cast( static_cast( allocator ) ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( pAllocator ) ); + d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( pAllocator ) ); + d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( pAllocator ) ); + 
d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( pAllocator ) ); + d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( pAllocator ) ); + d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE 
void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( pAllocator ) ); + d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDeferredOperationKHR( m_device, static_cast( operation ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDeferredOperationKHR( m_device, static_cast( operation ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDeferredOperationKHR( m_device, static_cast( operation ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyDeferredOperationKHR( m_device, static_cast( operation ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + #ifdef VK_ENABLE_BETA_EXTENSIONS - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDeferredOperationKHR( m_device, static_cast( operation ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDeferredOperationKHR( m_device, static_cast( operation ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional allocator, Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT { - d.vkDestroyDeferredOperationKHR( m_device, static_cast( operation ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyDeferredOperationKHR( m_device, static_cast( operation ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDescriptorSetLayout( m_device, 
static_cast( descriptorSetLayout ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDescriptorSetLayout( m_device, static_cast( descriptorSetLayout ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDescriptorSetLayout( m_device, static_cast( descriptorSetLayout ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyDescriptorSetLayout( m_device, static_cast( descriptorSetLayout ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDescriptorSetLayout( m_device, static_cast( descriptorSetLayout ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDescriptorSetLayout( m_device, static_cast( descriptorSetLayout ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDescriptorSetLayout( m_device, static_cast( descriptorSetLayout ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyDescriptorSetLayout( m_device, static_cast( descriptorSetLayout ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator, Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT { - d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const 
VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDevice( m_device, reinterpret_cast( pAllocator ) ); + d.vkDestroyDevice( m_device, reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDevice( m_device, reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyDevice( m_device, reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyEvent( m_device, static_cast( event ), reinterpret_cast( pAllocator ) ); + d.vkDestroyEvent( m_device, static_cast( event ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyEvent( m_device, static_cast( event ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyEvent( m_device, static_cast( event ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyEvent( m_device, static_cast( event ), reinterpret_cast( pAllocator ) ); + d.vkDestroyEvent( m_device, static_cast( event ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyEvent( m_device, static_cast( event ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyEvent( m_device, static_cast( event ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT { - d.vkDestroyFence( m_device, static_cast( fence ), reinterpret_cast( pAllocator ) ); + d.vkDestroyFence( m_device, static_cast( fence ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyFence( m_device, static_cast( fence ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyFence( m_device, static_cast( fence ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyFence( m_device, static_cast( fence ), reinterpret_cast( pAllocator ) ); + d.vkDestroyFence( m_device, static_cast( fence ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyFence( m_device, static_cast( fence ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyFence( m_device, static_cast( fence ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyFramebuffer( m_device, static_cast( framebuffer ), reinterpret_cast( pAllocator ) ); + d.vkDestroyFramebuffer( m_device, static_cast( framebuffer ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyFramebuffer( m_device, static_cast( framebuffer ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyFramebuffer( m_device, static_cast( framebuffer ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroy( 
VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyFramebuffer( m_device, static_cast( framebuffer ), reinterpret_cast( pAllocator ) ); + d.vkDestroyFramebuffer( m_device, static_cast( framebuffer ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyFramebuffer( m_device, static_cast( framebuffer ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyFramebuffer( m_device, static_cast( framebuffer ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( pAllocator ) ); + d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( pAllocator ) ); + d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const 
VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyImageView( m_device, static_cast( imageView ), reinterpret_cast( pAllocator ) ); + d.vkDestroyImageView( m_device, static_cast( imageView ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyImageView( m_device, static_cast( imageView ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyImageView( m_device, static_cast( imageView ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyImageView( m_device, static_cast( imageView ), reinterpret_cast( pAllocator ) ); + d.vkDestroyImageView( m_device, static_cast( imageView ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyImageView( m_device, static_cast( imageView ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyImageView( m_device, static_cast( imageView ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( pAllocator ) ); + d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - 
d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( pAllocator ) ); + d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPipeline( m_device, 
static_cast( pipeline ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT 
+ + template + VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPrivateDataSlotEXT( m_device, static_cast( privateDataSlot ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPrivateDataSlotEXT( m_device, static_cast( privateDataSlot ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Optional allocator, Dispatch const & 
d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPrivateDataSlotEXT( m_device, static_cast( privateDataSlot ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyPrivateDataSlotEXT( m_device, static_cast( privateDataSlot ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPrivateDataSlotEXT( m_device, static_cast( privateDataSlot ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPrivateDataSlotEXT( m_device, static_cast( privateDataSlot ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyPrivateDataSlotEXT( m_device, static_cast( privateDataSlot ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyPrivateDataSlotEXT( m_device, static_cast( privateDataSlot ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( pAllocator ) ); + d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( pAllocator ) ); + d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( pAllocator ) ); } + #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( pAllocator ) ); + d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( pAllocator ) ); + d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT { - d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( pAllocator ) ); + d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( pAllocator ) ); + d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( pAllocator ) ); + d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void 
Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast( ycbcrConversion ), reinterpret_cast( pAllocator ) ); + d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast( ycbcrConversion ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast( ycbcrConversion ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast( ycbcrConversion ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( pAllocator ) ); + d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( pAllocator ) ); + d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void 
Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( pAllocator ) ); + d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); + d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - 
d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); + d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySwapchainKHR( m_device, static_cast( swapchain ), reinterpret_cast( pAllocator ) ); + d.vkDestroySwapchainKHR( m_device, static_cast( swapchain ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySwapchainKHR( m_device, static_cast( swapchain ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroySwapchainKHR( m_device, static_cast( swapchain ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySwapchainKHR( m_device, static_cast( swapchain ), reinterpret_cast( pAllocator ) ); + d.vkDestroySwapchainKHR( m_device, static_cast( swapchain ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySwapchainKHR( m_device, static_cast( swapchain ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroySwapchainKHR( m_device, static_cast( swapchain ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const AllocationCallbacks *>( allocator ) ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const AllocationCallbacks *>( allocator ) ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
   }
 #else
-  template<typename Dispatch>
-  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::waitIdle(Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::waitIdle( Dispatch const & d ) const
   {
     Result result = static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitIdle" );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT*>( pDisplayPowerInfo ) ) );
+    return static_cast<Result>( d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( pDisplayPowerInfo ) ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const & d ) const
   {
-    Result result = static_cast<Result>( d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT*>( &displayPowerInfo ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::displayPowerControlEXT" );
+    Result result = static_cast<Result>( d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -84475,29 +84233,33 @@ namespace VULKAN_HPP_NAMESPACE
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
+    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const &commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
+    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const
VULKAN_HPP_NOEXCEPT
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
+    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const &commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
+    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -84529,29 +84291,33 @@ namespace VULKAN_HPP_NAMESPACE
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const AllocationCallbacks *>( allocator ) ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const AllocationCallbacks *>( allocator ) ) );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
@@ -84572,13 +84338,13 @@ namespace VULKAN_HPP_NAMESPACE
   template <typename Dispatch>
-  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, size_t dataSize, void* pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), dataSize, pData ) );
+    return static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), dataSize, pData ) );
  }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename T, typename Dispatch>
+  template <typename T, typename Dispatch>
   VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it.") VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, ArrayProxy<T> const &data, Dispatch const &d ) const
   {
@@ -84587,27 +84353,24 @@ namespace VULKAN_HPP_NAMESPACE
   }
-  template<typename T, typename Allocator, typename Dispatch>
-  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T,Allocator>>::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, size_t dataSize, Dispatch const &d ) const
+  template <typename T, typename Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T,Allocator>>::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, size_t dataSize, Dispatch const & d ) const
   {
     VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
     std::vector<T,Allocator> data( dataSize / sizeof( T ) );
-    Result result = static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
-
+    Result result = static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), dataSize, reinterpret_cast<void *>( data.data() ) ) );
     return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
   }
-  template<typename T, typename Dispatch>
-  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Dispatch const &d ) const
+  template <typename T, typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Dispatch const &d ) const
   {
     T data;
-    Result result = static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), 1 * sizeof( T ) , reinterpret_cast<void*>( &data ) ) );
-
+    Result result = static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), sizeof( T ), reinterpret_cast<void *>( &data ) ) );
    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
 #ifdef VK_ENABLE_BETA_EXTENSIONS
   template <typename Dispatch>
   VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoKHR* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
@@ -84909,18 +84672,20 @@ namespace VULKAN_HPP_NAMESPACE
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
 #ifdef VK_ENABLE_BETA_EXTENSIONS
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE Result Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionKHR* version, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionKHR* version, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, reinterpret_cast<const VkAccelerationStructureVersionKHR*>( version ) ) );
+    return static_cast<Result>( d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, reinterpret_cast<const VkAccelerationStructureVersionKHR *>( version ) ) );
  }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename Dispatch>
-  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionKHR & version, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionKHR & version, Dispatch const & d ) const
   {
-    Result result = static_cast<Result>( d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, reinterpret_cast<const VkAccelerationStructureVersionKHR*>( &version ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getAccelerationStructureCompatibilityKHR" );
+    Result result = static_cast<Result>( d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, reinterpret_cast<const VkAccelerationStructureVersionKHR *>( &version ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureCompatibilityKHR" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
 #endif /*VK_ENABLE_BETA_EXTENSIONS*/
@@ -85779,13 +85544,13 @@ namespace VULKAN_HPP_NAMESPACE
   template <typename Dispatch>
-  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, dataSize, pData, static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ) );
+    return static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, dataSize, pData, static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ) );
  }
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template<typename T, typename Dispatch>
+  template <typename T, typename Dispatch>
   VULKAN_HPP_DEPRECATED( "This function is deprecated.
Use one of the other flavours of it.") VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy const &data, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d ) const { @@ -85794,37 +85559,34 @@ namespace VULKAN_HPP_NAMESPACE } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d ) const { VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 ); std::vector data( dataSize / sizeof( T ) ); - Result result = static_cast( d.vkGetQueryPoolResults( m_device, static_cast( queryPool ), firstQuery, queryCount, data.size() * sizeof( T ) , reinterpret_cast( data.data() ), static_cast( stride ), static_cast( flags ) ) ); - + Result result = static_cast( d.vkGetQueryPoolResults( m_device, static_cast( queryPool ), firstQuery, queryCount, dataSize, reinterpret_cast( data.data() ), static_cast( stride ), static_cast( flags ) ) ); return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d ) const { T data; - Result result = static_cast( d.vkGetQueryPoolResults( m_device, static_cast( queryPool ), firstQuery, queryCount, 1 * sizeof( T ) , reinterpret_cast( &data ), static_cast( stride ), static_cast( flags ) ) ); - - return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } ); + Result result = static_cast( d.vkGetQueryPoolResults( m_device, static_cast( queryPool ), firstQuery, queryCount, sizeof( T ), reinterpret_cast( &data ), static_cast( stride ), static_cast( flags ) ) ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult", { Result::eSuccess, Result::eNotReady } ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - #ifdef VK_ENABLE_BETA_EXTENSIONS template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, 
void* pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + return static_cast( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it.") VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy const &data, Dispatch const &d ) const { @@ -85833,38 +85595,35 @@ namespace VULKAN_HPP_NAMESPACE } - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const { VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 ); std::vector data( dataSize / sizeof( T ) ); - Result result = static_cast( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ) ); - + Result result = static_cast( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, reinterpret_cast( data.data() ) ) ); return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" ); } - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const { T data; - Result result = static_cast( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, 1 * sizeof( T ) , reinterpret_cast( &data ) ) ); - - return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" ); + Result result = static_cast( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, sizeof( T ), reinterpret_cast( &data ) ) ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandleKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - #ifdef VK_ENABLE_BETA_EXTENSIONS template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t 
groupCount, size_t dataSize, void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + return static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it.") VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy const &data, Dispatch const &d ) const { @@ -85873,36 +85632,33 @@ namespace VULKAN_HPP_NAMESPACE } - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const { VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 ); std::vector data( dataSize / sizeof( T ) ); - Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ) ); - + Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, reinterpret_cast( data.data() ) ) ); return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" ); } - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const { T data; - Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, 1 * sizeof( T ) , reinterpret_cast( &data ) ) ); - - return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" ); + Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, sizeof( T ), reinterpret_cast( &data ) ) ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( 
VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + return static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it.") VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy const &data, Dispatch const &d ) const { @@ -85911,27 +85667,24 @@ namespace VULKAN_HPP_NAMESPACE } - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType>::type Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const { VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 ); std::vector data( dataSize / sizeof( T ) ); - Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ) ); - + Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, reinterpret_cast( data.data() ) ) ); return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" ); } - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const { T data; - Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast( pipeline ), firstGroup, groupCount, 1 * sizeof( T ) , reinterpret_cast( &data ) ) ); - - return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" ); + Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast( pipeline ), firstGroup, groupCount, sizeof( T ), reinterpret_cast( &data ) ) ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleNV" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
   {
@@ -86210,77 +85963,87 @@ namespace VULKAN_HPP_NAMESPACE
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-  template <typename Dispatch>
-  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR*>( pImportFenceFdInfo ) ) );
+    return static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR*>( pImportFenceFdInfo ) ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d ) const
   {
-    Result result = static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR*>( &importFenceFdInfo ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importFenceFdKHR" );
+    Result result = static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR*>( &importFenceFdInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
 #ifdef VK_USE_PLATFORM_WIN32_KHR
-  template <typename Dispatch>
-  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
-    return static_cast<Result>( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>( pImportFenceWin32HandleInfo ) ) );
+    return static_cast<Result>( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>( pImportFenceWin32HandleInfo ) ) );
   }
+
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d ) const
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const & d ) const
   {
-    Result result = static_cast<Result>( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>( &importFenceWin32HandleInfo ) ) );
-    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importFenceWin32HandleKHR" );
+    Result result = static_cast<Result>( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>( &importFenceWin32HandleInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" );
   }
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
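// --------------------------------------------------------------------------------------------
// Editorial usage sketch (not part of the generated header): how the enhanced-mode overload
// above is typically called once VK_KHR_external_fence_win32 is enabled on the device.
// `device`, `fence` and `win32Handle` are assumed to exist already; the HANDLE would normally
// come from an exporting call such as vkGetFenceWin32HandleKHR on another device or process.
// With exceptions enabled the wrapper returns void and throws vk::SystemError on failure.
inline void importFenceFromWin32HandleExample( VULKAN_HPP_NAMESPACE::Device device,
                                               VULKAN_HPP_NAMESPACE::Fence  fence,
                                               HANDLE                       win32Handle )
{
  VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR importInfo;
  importInfo.fence      = fence;        // fence that receives the imported payload
  importInfo.handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueWin32;
  importInfo.handle     = win32Handle;  // opaque handle exported elsewhere
  device.importFenceWin32HandleKHR( importInfo );  // dispatches to vkImportFenceWin32HandleKHR
}
// --------------------------------------------------------------------------------------------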
#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast( pImportSemaphoreFdInfo ) ) ); + return static_cast( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast( pImportSemaphoreFdInfo ) ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d ) const { - Result result = static_cast( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast( &importSemaphoreFdInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importSemaphoreFdKHR" ); + Result result = static_cast( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast( &importSemaphoreFdInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VK_USE_PLATFORM_WIN32_KHR - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast( pImportSemaphoreWin32HandleInfo ) ) ); + return static_cast( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast( pImportSemaphoreWin32HandleInfo ) ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const & d ) const { - Result result = static_cast( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast( &importSemaphoreWin32HandleInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importSemaphoreWin32HandleKHR" ); + Result result = static_cast( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast( &importSemaphoreWin32HandleInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast( pInitializeInfo ) ) ); + return static_cast( d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast( pInitializeInfo ) ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const & d ) const { - Result result = static_cast( d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast( &initializeInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::initializePerformanceApiINTEL" ); + Result result = static_cast( d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast( &initializeInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -86393,97 +86156,100 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VK_USE_PLATFORM_WIN32_KHR #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); } #else - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const { Result result = static_cast( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::releaseFullScreenExclusiveModeEXT" ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( 
VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast( configuration ) ) ); } #else - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const { Result result = static_cast( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast( configuration ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::releasePerformanceConfigurationINTEL" ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releasePerformanceConfigurationINTEL" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR(Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + + template + VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkReleaseProfilingLockKHR( m_device ); } -#else - template - VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkReleaseProfilingLockKHR( m_device ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkResetCommandPool( m_device, static_cast( commandPool ), static_cast( flags ) ) ); } #else - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d ) const { Result result = static_cast( d.vkResetCommandPool( m_device, static_cast( commandPool ), static_cast( flags ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetCommandPool" ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkResetDescriptorPool( 
m_device, static_cast( descriptorPool ), static_cast( flags ) ) ); } #else - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const & d ) const { Result result = static_cast( d.vkResetDescriptorPool( m_device, static_cast( descriptorPool ), static_cast( flags ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetDescriptorPool" ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetDescriptorPool" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkResetEvent( m_device, static_cast( event ) ) ); } #else - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const { Result result = static_cast( d.vkResetEvent( m_device, static_cast( event ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetEvent" ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -86498,249 +86264,199 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkResetQueryPool( m_device, static_cast( queryPool ), firstQuery, queryCount ); } -#else - template - VULKAN_HPP_INLINE void Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkResetQueryPool( m_device, static_cast( queryPool ), firstQuery, queryCount ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkResetQueryPoolEXT( m_device, static_cast( queryPool ), firstQuery, queryCount ); } -#else 
- template - VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkResetQueryPoolEXT( m_device, static_cast( queryPool ), firstQuery, queryCount ); + return static_cast( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast( pNameInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d ) const + { + Result result = static_cast( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast( &nameInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast( pNameInfo ) ) ); + return static_cast( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( pTagInfo ) ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d ) const { - Result result = static_cast( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast( &nameInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setDebugUtilsObjectNameEXT" ); + Result result = static_cast( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - return static_cast( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( pTagInfo ) ) ); - } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d ) const - { - Result result = static_cast( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setDebugUtilsObjectTagEXT" ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkSetEvent( m_device, static_cast( event ) ) ); } #else - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const { Result result = static_cast( d.vkSetEvent( m_device, static_cast( event ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setEvent" ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT* pMetadata, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + + template + VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT* pMetadata, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkSetHdrMetadataEXT( m_device, swapchainCount, reinterpret_cast( pSwapchains ), reinterpret_cast( pMetadata ) ); + d.vkSetHdrMetadataEXT( m_device, swapchainCount, reinterpret_cast( pSwapchains ), reinterpret_cast( pMetadata ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( ArrayProxy const &swapchains, ArrayProxy const &metadata, Dispatch const &d ) const + template + VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( ArrayProxy const & swapchains, ArrayProxy const & metadata, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { #ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() ); #else if ( swapchains.size() != metadata.size() ) { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkDevice::setHdrMetadataEXT: swapchains.size() != metadata.size()" ); + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" ); } #endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - d.vkSetHdrMetadataEXT( m_device, swapchains.size() , reinterpret_cast( swapchains.data() ), reinterpret_cast( metadata.data() ) ); + + d.vkSetHdrMetadataEXT( m_device, swapchains.size(), reinterpret_cast( swapchains.data() ), reinterpret_cast( metadata.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkSetLocalDimmingAMD( m_device, static_cast( swapChain ), static_cast( localDimmingEnable ) ); } -#else - template - VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, 
VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkSetLocalDimmingAMD( m_device, static_cast( swapChain ), static_cast( localDimmingEnable ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkSetPrivateDataEXT( m_device, static_cast( objectType ), objectHandle, static_cast( privateDataSlot ), data ) ); } #else - template - VULKAN_HPP_INLINE typename ResultValueType::type Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const & d ) const { Result result = static_cast( d.vkSetPrivateDataEXT( m_device, static_cast( objectType ), objectHandle, static_cast( privateDataSlot ), data ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setPrivateDataEXT" ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkSignalSemaphore( m_device, reinterpret_cast( pSignalInfo ) ) ); + return static_cast( d.vkSignalSemaphore( m_device, reinterpret_cast( pSignalInfo ) ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const { - Result result = static_cast( d.vkSignalSemaphore( m_device, reinterpret_cast( &signalInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::signalSemaphore" ); + Result result = static_cast( d.vkSignalSemaphore( m_device, reinterpret_cast( &signalInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast( pSignalInfo ) ) ); + return static_cast( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast( pSignalInfo ) ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const { - Result result = static_cast( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast( &signalInfo ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::signalSemaphoreKHR" ); + Result result = static_cast( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast( &signalInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkTrimCommandPool( m_device, static_cast( commandPool ), static_cast( flags ) ); } -#else - template - VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkTrimCommandPool( m_device, static_cast( commandPool ), static_cast( flags ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkTrimCommandPoolKHR( m_device, static_cast( commandPool ), static_cast( flags ) ); } -#else - template - VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkTrimCommandPoolKHR( m_device, static_cast( commandPool ), static_cast( flags ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL(Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkUninitializePerformanceApiINTEL( m_device ); } -#else - template - VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkUninitializePerformanceApiINTEL( m_device ); - } -#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkUnmapMemory( m_device, static_cast( memory ) ); } -#else - template - VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkUnmapMemory( m_device, static_cast( memory ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast( descriptorSet ), static_cast( descriptorUpdateTemplate ), pData ); } -#else - template - VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast( descriptorSet ), static_cast( descriptorUpdateTemplate ), pData ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkUpdateDescriptorSetWithTemplateKHR( m_device, static_cast( descriptorSet ), static_cast( descriptorUpdateTemplate ), pData ); } -#else - template - VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkUpdateDescriptorSetWithTemplateKHR( m_device, static_cast( descriptorSet ), static_cast( descriptorUpdateTemplate ), pData ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet* pDescriptorCopies, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const 
VULKAN_HPP_NAMESPACE::CopyDescriptorSet* pDescriptorCopies, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkUpdateDescriptorSets( m_device, descriptorWriteCount, reinterpret_cast( pDescriptorWrites ), descriptorCopyCount, reinterpret_cast( pDescriptorCopies ) ); + d.vkUpdateDescriptorSets( m_device, descriptorWriteCount, reinterpret_cast( pDescriptorWrites ), descriptorCopyCount, reinterpret_cast( pDescriptorCopies ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::updateDescriptorSets( ArrayProxy const &descriptorWrites, ArrayProxy const &descriptorCopies, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::updateDescriptorSets( ArrayProxy const & descriptorWrites, ArrayProxy const & descriptorCopies, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkUpdateDescriptorSets( m_device, descriptorWrites.size() , reinterpret_cast( descriptorWrites.data() ), descriptorCopies.size() , reinterpret_cast( descriptorCopies.data() ) ); + d.vkUpdateDescriptorSets( m_device, descriptorWrites.size(), reinterpret_cast( descriptorWrites.data() ), descriptorCopies.size(), reinterpret_cast( descriptorCopies.data() ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -87242,115 +86958,123 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_USE_PLATFORM_XLIB_KHR*/ - template - VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { d.vkDebugReportMessageEXT( m_instance, static_cast( flags ), static_cast( objectType ), object, location, messageCode, pLayerPrefix, pMessage ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d ) const + template + VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( layerPrefix.size() == message.size() ); -#else - if ( layerPrefix.size() != message.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkInstance::debugReportMessageEXT: layerPrefix.size() != message.size()" ); - } -#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ d.vkDebugReportMessageEXT( m_instance, static_cast( flags ), static_cast( objectType ), object, location, messageCode, layerPrefix.c_str(), message.c_str() ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT 
callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional allocator, 
Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyInstance( m_instance, reinterpret_cast( pAllocator ) ); + d.vkDestroyInstance( m_instance, reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Instance::destroy( Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Instance::destroy( Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroyInstance( m_instance, reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyInstance( m_instance, reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( pAllocator ) ); + d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - 
VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( pAllocator ) ); + d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( pAllocator ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( static_cast( allocator ) ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -87523,16 +87247,18 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast( messageSeverity ), static_cast( messageTypes ), reinterpret_cast( pCallbackData ) ); + d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast( messageSeverity ), static_cast( messageTypes ), reinterpret_cast( pCallbackData ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast( messageSeverity ), static_cast( messageTypes ), reinterpret_cast( &callbackData ) ); + d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast( messageSeverity ), static_cast( messageTypes ), reinterpret_cast( &callbackData ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -87698,10 +87424,10 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t* pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR* pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR* pCounterDescriptions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t* pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR* pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR* pCounterDescriptions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - return static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, pCounterCount, reinterpret_cast( pCounters ), reinterpret_cast( pCounterDescriptions ) ) ); + return static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, pCounterCount, reinterpret_cast< VkPerformanceCounterKHR *>( pCounters ), reinterpret_cast< VkPerformanceCounterDescriptionKHR *>( pCounterDescriptions ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -87755,7 +87481,7 @@ namespace VULKAN_HPP_NAMESPACE } - template + template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, std::vector>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const { std::pair, std::vector> enumeratedData; @@ -87780,7 +87506,7 @@ namespace VULKAN_HPP_NAMESPACE return createResultValue( result, enumeratedData, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); } - template ::value && std::is_same::value, int>::type > + template ::value && std::is_same::value, int>::type > VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, std::vector>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, PerformanceCounterKHRAllocator & countersAllocator, PerformanceCounterDescriptionKHRAllocator & counterDescriptionsAllocator, Dispatch const & d ) const { std::pair, std::vector> enumeratedData( std::piecewise_construct, std::forward_as_tuple( countersAllocator ), std::forward_as_tuple( counterDescriptionsAllocator ) ); @@ -89515,21 +89241,23 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast( display ) ) ); } #else - template - VULKAN_HPP_INLINE typename ResultValueType::type 
PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const + template + VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const { Result result = static_cast( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast( display ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::releaseDisplayEXT" ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::releaseDisplayEXT" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t* pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointDataNV* pCheckpointData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -89558,16 +89286,18 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + + template + VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast( pLabelInfo ) ); + d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast( pLabelInfo ) ); } + #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast( &labelInfo ) ); + d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast( &labelInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -89585,30 +89315,25 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT(Dispatch const &d) const VULKAN_HPP_NOEXCEPT - { - d.vkQueueEndDebugUtilsLabelEXT( m_queue ); - } -#else - template - VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT - { - d.vkQueueEndDebugUtilsLabelEXT( m_queue ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast( pLabelInfo ) ); + d.vkQueueEndDebugUtilsLabelEXT( m_queue ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + + + template + VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast( &labelInfo ) ); + d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast( pLabelInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void 
Queue::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast( &labelInfo ) ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -89626,21 +89351,23 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast( configuration ) ) ); } #else - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const { Result result = static_cast( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast( configuration ) ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::setPerformanceConfigurationINTEL" ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo* pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT { @@ -89655,20 +89382,22 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle(Dispatch const &d) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { return static_cast( d.vkQueueWaitIdle( m_queue ) ); } #else - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Queue::waitIdle(Dispatch const &d ) const + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Queue::waitIdle( Dispatch const & d ) const { Result result = static_cast( d.vkQueueWaitIdle( m_queue ) ); - return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::waitIdle" ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + #ifdef VK_USE_PLATFORM_ANDROID_KHR template <> struct StructExtends{ enum { value = true }; }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ @@ -89923,6 +89652,8 @@ namespace VULKAN_HPP_NAMESPACE template <> struct StructExtends{ enum { value = true }; }; template <> struct StructExtends{ enum { value = true }; }; template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; + template <> struct StructExtends{ enum { value = true }; }; template <> struct 
StructExtends{ enum { value = true }; }; template <> struct StructExtends{ enum { value = true }; }; template <> struct StructExtends{ enum { value = true }; }; diff --git a/include/vulkan/vulkan_core.h b/include/vulkan/vulkan_core.h index ac904bc..7edfe0a 100644 --- a/include/vulkan/vulkan_core.h +++ b/include/vulkan/vulkan_core.h @@ -43,7 +43,7 @@ extern "C" { #define VK_API_VERSION_1_0 VK_MAKE_VERSION(1, 0, 0)// Patch version should always be set to 0 // Version of this file -#define VK_HEADER_VERSION 154 +#define VK_HEADER_VERSION 155 // Complete version of this file #define VK_HEADER_VERSION_COMPLETE VK_MAKE_VERSION(1, 2, VK_HEADER_VERSION) @@ -531,6 +531,7 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT = 1000225002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD = 1000227000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD = 1000229000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT = 1000234000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT = 1000237000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT = 1000238000, VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT = 1000238001, @@ -10233,6 +10234,18 @@ typedef struct VkPhysicalDeviceCoherentMemoryFeaturesAMD { +#define VK_EXT_shader_image_atomic_int64 1 +#define VK_EXT_SHADER_IMAGE_ATOMIC_INT64_SPEC_VERSION 1 +#define VK_EXT_SHADER_IMAGE_ATOMIC_INT64_EXTENSION_NAME "VK_EXT_shader_image_atomic_int64" +typedef struct VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderImageInt64Atomics; + VkBool32 sparseImageInt64Atomics; +} VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + + + #define VK_EXT_memory_budget 1 #define VK_EXT_MEMORY_BUDGET_SPEC_VERSION 1 #define VK_EXT_MEMORY_BUDGET_EXTENSION_NAME "VK_EXT_memory_budget" diff --git a/registry/validusage.json b/registry/validusage.json index 0e12e85..01b8a37 100644 --- a/registry/validusage.json +++ b/registry/validusage.json @@ -1,9 +1,9 @@ { "version info": { "schema version": 2, - "api version": "1.2.154", - "comment": "from git branch: github-main commit: 5c8259673b722a86525f5a9dbee1ffd92371656b", - "date": "2020-09-20 10:24:47Z" + "api version": "1.2.155", + "comment": "from git branch: github-main commit: 2ffe5b06dd338347636c961399eaca384b864d60", + "date": "2020-09-28 05:04:42Z" }, "validation": { "vkGetInstanceProcAddr": { @@ -516,7 +516,7 @@ }, { "vuid": "VUID-VkDeviceCreateInfo-pNext-pNext", - "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeviceDiagnosticsConfigCreateInfoNV, VkDeviceGroupDeviceCreateInfo, VkDeviceMemoryOverallocationCreateInfoAMD, VkDevicePrivateDataCreateInfoEXT, VkPhysicalDevice16BitStorageFeatures, VkPhysicalDevice4444FormatsFeaturesEXT, VkPhysicalDevice8BitStorageFeatures, VkPhysicalDeviceASTCDecodeFeaturesEXT, VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, VkPhysicalDeviceBufferDeviceAddressFeatures, VkPhysicalDeviceBufferDeviceAddressFeaturesEXT, VkPhysicalDeviceCoherentMemoryFeaturesAMD, VkPhysicalDeviceComputeShaderDerivativesFeaturesNV, VkPhysicalDeviceConditionalRenderingFeaturesEXT, VkPhysicalDeviceCooperativeMatrixFeaturesNV, VkPhysicalDeviceCornerSampledImageFeaturesNV, VkPhysicalDeviceCoverageReductionModeFeaturesNV, VkPhysicalDeviceCustomBorderColorFeaturesEXT, VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, 
VkPhysicalDeviceDepthClipEnableFeaturesEXT, VkPhysicalDeviceDescriptorIndexingFeatures, VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV, VkPhysicalDeviceDiagnosticsConfigFeaturesNV, VkPhysicalDeviceExclusiveScissorFeaturesNV, VkPhysicalDeviceExtendedDynamicStateFeaturesEXT, VkPhysicalDeviceFeatures2, VkPhysicalDeviceFragmentDensityMap2FeaturesEXT, VkPhysicalDeviceFragmentDensityMapFeaturesEXT, VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV, VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT, VkPhysicalDeviceHostQueryResetFeatures, VkPhysicalDeviceImageRobustnessFeaturesEXT, VkPhysicalDeviceImagelessFramebufferFeatures, VkPhysicalDeviceIndexTypeUint8FeaturesEXT, VkPhysicalDeviceInlineUniformBlockFeaturesEXT, VkPhysicalDeviceLineRasterizationFeaturesEXT, VkPhysicalDeviceMemoryPriorityFeaturesEXT, VkPhysicalDeviceMeshShaderFeaturesNV, VkPhysicalDeviceMultiviewFeatures, VkPhysicalDevicePerformanceQueryFeaturesKHR, VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT, VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR, VkPhysicalDevicePortabilitySubsetFeaturesKHR, VkPhysicalDevicePrivateDataFeaturesEXT, VkPhysicalDeviceProtectedMemoryFeatures, VkPhysicalDeviceRayTracingFeaturesKHR, VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV, VkPhysicalDeviceRobustness2FeaturesEXT, VkPhysicalDeviceSamplerYcbcrConversionFeatures, VkPhysicalDeviceScalarBlockLayoutFeatures, VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures, VkPhysicalDeviceShaderAtomicFloatFeaturesEXT, VkPhysicalDeviceShaderAtomicInt64Features, VkPhysicalDeviceShaderClockFeaturesKHR, VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT, VkPhysicalDeviceShaderDrawParametersFeatures, VkPhysicalDeviceShaderFloat16Int8Features, VkPhysicalDeviceShaderImageFootprintFeaturesNV, VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, VkPhysicalDeviceShaderSMBuiltinsFeaturesNV, VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures, VkPhysicalDeviceShadingRateImageFeaturesNV, VkPhysicalDeviceSubgroupSizeControlFeaturesEXT, VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT, VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, VkPhysicalDeviceTimelineSemaphoreFeatures, VkPhysicalDeviceTransformFeedbackFeaturesEXT, VkPhysicalDeviceUniformBufferStandardLayoutFeatures, VkPhysicalDeviceVariablePointersFeatures, VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT, VkPhysicalDeviceVulkan11Features, VkPhysicalDeviceVulkan12Features, VkPhysicalDeviceVulkanMemoryModelFeatures, or VkPhysicalDeviceYcbcrImageArraysFeaturesEXT" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeviceDiagnosticsConfigCreateInfoNV, VkDeviceGroupDeviceCreateInfo, VkDeviceMemoryOverallocationCreateInfoAMD, VkDevicePrivateDataCreateInfoEXT, VkPhysicalDevice16BitStorageFeatures, VkPhysicalDevice4444FormatsFeaturesEXT, VkPhysicalDevice8BitStorageFeatures, VkPhysicalDeviceASTCDecodeFeaturesEXT, VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, VkPhysicalDeviceBufferDeviceAddressFeatures, VkPhysicalDeviceBufferDeviceAddressFeaturesEXT, VkPhysicalDeviceCoherentMemoryFeaturesAMD, VkPhysicalDeviceComputeShaderDerivativesFeaturesNV, VkPhysicalDeviceConditionalRenderingFeaturesEXT, VkPhysicalDeviceCooperativeMatrixFeaturesNV, VkPhysicalDeviceCornerSampledImageFeaturesNV, VkPhysicalDeviceCoverageReductionModeFeaturesNV, VkPhysicalDeviceCustomBorderColorFeaturesEXT, VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, VkPhysicalDeviceDepthClipEnableFeaturesEXT, 
VkPhysicalDeviceDescriptorIndexingFeatures, VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV, VkPhysicalDeviceDiagnosticsConfigFeaturesNV, VkPhysicalDeviceExclusiveScissorFeaturesNV, VkPhysicalDeviceExtendedDynamicStateFeaturesEXT, VkPhysicalDeviceFeatures2, VkPhysicalDeviceFragmentDensityMap2FeaturesEXT, VkPhysicalDeviceFragmentDensityMapFeaturesEXT, VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV, VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT, VkPhysicalDeviceHostQueryResetFeatures, VkPhysicalDeviceImageRobustnessFeaturesEXT, VkPhysicalDeviceImagelessFramebufferFeatures, VkPhysicalDeviceIndexTypeUint8FeaturesEXT, VkPhysicalDeviceInlineUniformBlockFeaturesEXT, VkPhysicalDeviceLineRasterizationFeaturesEXT, VkPhysicalDeviceMemoryPriorityFeaturesEXT, VkPhysicalDeviceMeshShaderFeaturesNV, VkPhysicalDeviceMultiviewFeatures, VkPhysicalDevicePerformanceQueryFeaturesKHR, VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT, VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR, VkPhysicalDevicePortabilitySubsetFeaturesKHR, VkPhysicalDevicePrivateDataFeaturesEXT, VkPhysicalDeviceProtectedMemoryFeatures, VkPhysicalDeviceRayTracingFeaturesKHR, VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV, VkPhysicalDeviceRobustness2FeaturesEXT, VkPhysicalDeviceSamplerYcbcrConversionFeatures, VkPhysicalDeviceScalarBlockLayoutFeatures, VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures, VkPhysicalDeviceShaderAtomicFloatFeaturesEXT, VkPhysicalDeviceShaderAtomicInt64Features, VkPhysicalDeviceShaderClockFeaturesKHR, VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT, VkPhysicalDeviceShaderDrawParametersFeatures, VkPhysicalDeviceShaderFloat16Int8Features, VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT, VkPhysicalDeviceShaderImageFootprintFeaturesNV, VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, VkPhysicalDeviceShaderSMBuiltinsFeaturesNV, VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures, VkPhysicalDeviceShadingRateImageFeaturesNV, VkPhysicalDeviceSubgroupSizeControlFeaturesEXT, VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT, VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, VkPhysicalDeviceTimelineSemaphoreFeatures, VkPhysicalDeviceTransformFeedbackFeaturesEXT, VkPhysicalDeviceUniformBufferStandardLayoutFeatures, VkPhysicalDeviceVariablePointersFeatures, VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT, VkPhysicalDeviceVulkan11Features, VkPhysicalDeviceVulkan12Features, VkPhysicalDeviceVulkanMemoryModelFeatures, or VkPhysicalDeviceYcbcrImageArraysFeaturesEXT" }, { "vuid": "VUID-VkDeviceCreateInfo-sType-unique", @@ -21208,7 +21208,11 @@ }, { "vuid": "VUID-vkCmdDraw-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdDraw-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." 
}, { "vuid": "VUID-vkCmdDraw-renderPass-02684", @@ -21297,6 +21301,32 @@ "text": " If commandBuffer is a protected command buffer, pipeline stages other than the framebuffer-space and compute stages in the VkPipeline object bound to the pipeline bind point must not write to any resource" } ], + "(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdDraw-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDraw-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDraw-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDraw-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDraw-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + }, + { + "vuid": "VUID-vkCmdDraw-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + } + ], "(VK_VERSION_1_1,VK_KHR_multiview)": [ { "vuid": "VUID-vkCmdDraw-maxMultiviewInstanceIndex-02688", @@ -21416,7 +21446,11 @@ }, { "vuid": "VUID-vkCmdDrawIndexed-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdDrawIndexed-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." }, { "vuid": "VUID-vkCmdDrawIndexed-renderPass-02684", @@ -21509,6 +21543,32 @@ "text": " If commandBuffer is a protected command buffer, pipeline stages other than the framebuffer-space and compute stages in the VkPipeline object bound to the pipeline bind point must not write to any resource" } ], + "(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdDrawIndexed-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." 
+ }, + { + "vuid": "VUID-vkCmdDrawIndexed-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawIndexed-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawIndexed-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawIndexed-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + }, + { + "vuid": "VUID-vkCmdDrawIndexed-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + } + ], "(VK_VERSION_1_1,VK_KHR_multiview)": [ { "vuid": "VUID-vkCmdDrawIndexed-maxMultiviewInstanceIndex-02688", @@ -21628,7 +21688,11 @@ }, { "vuid": "VUID-vkCmdDrawIndirect-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdDrawIndirect-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." }, { "vuid": "VUID-vkCmdDrawIndirect-renderPass-02684", @@ -21757,6 +21821,32 @@ "text": " commandBuffer must not be a protected command buffer" } ], + "(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdDrawIndirect-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawIndirect-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawIndirect-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." 
+ }, + { + "vuid": "VUID-vkCmdDrawIndirect-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawIndirect-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + }, + { + "vuid": "VUID-vkCmdDrawIndirect-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + } + ], "(VK_VERSION_1_1,VK_KHR_multiview)": [ { "vuid": "VUID-vkCmdDrawIndirect-maxMultiviewInstanceIndex-02688", @@ -21888,7 +21978,11 @@ }, { "vuid": "VUID-vkCmdDrawIndirectCount-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." }, { "vuid": "VUID-vkCmdDrawIndirectCount-renderPass-02684", @@ -22005,6 +22099,32 @@ "text": " commandBuffer must not be a protected command buffer" } ], + "(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdDrawIndirectCount-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." 
+ }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + } + ], "(VK_VERSION_1_1,VK_KHR_multiview)": [ { "vuid": "VUID-vkCmdDrawIndirectCount-maxMultiviewInstanceIndex-02688", @@ -22160,7 +22280,11 @@ }, { "vuid": "VUID-vkCmdDrawIndexedIndirect-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." }, { "vuid": "VUID-vkCmdDrawIndexedIndirect-renderPass-02684", @@ -22289,6 +22413,32 @@ "text": " commandBuffer must not be a protected command buffer" } ], + "(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." 
+ } + ], "(VK_VERSION_1_1,VK_KHR_multiview)": [ { "vuid": "VUID-vkCmdDrawIndexedIndirect-maxMultiviewInstanceIndex-02688", @@ -22424,7 +22574,11 @@ }, { "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." }, { "vuid": "VUID-vkCmdDrawIndexedIndirectCount-renderPass-02684", @@ -22541,6 +22695,32 @@ "text": " commandBuffer must not be a protected command buffer" } ], + "(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + } + ], "(VK_VERSION_1_1,VK_KHR_multiview)": [ { "vuid": "VUID-vkCmdDrawIndexedIndirectCount-maxMultiviewInstanceIndex-02688", @@ -22696,7 +22876,11 @@ }, { "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." 
+ "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." }, { "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-renderPass-02684", @@ -22781,6 +22965,32 @@ "text": " commandBuffer must not be a protected command buffer" } ], + "(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + } + ], "(VK_VERSION_1_1,VK_KHR_multiview)": [ { "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-maxMultiviewInstanceIndex-02688", @@ -23014,7 +23224,11 @@ }, { "vuid": "VUID-vkCmdDrawMeshTasksNV-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." 
}, { "vuid": "VUID-vkCmdDrawMeshTasksNV-renderPass-02684", @@ -23071,6 +23285,32 @@ "text": " If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource" } ], + "(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + } + ], "(VK_VERSION_1_1,VK_KHR_multiview)": [ { "vuid": "VUID-vkCmdDrawMeshTasksNV-maxMultiviewInstanceIndex-02688", @@ -23208,7 +23448,11 @@ }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-renderPass-02684", @@ -23297,6 +23541,32 @@ "text": " commandBuffer must not be a protected command buffer" } ], + "(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." 
+ }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + } + ], "(VK_VERSION_1_1,VK_KHR_multiview)": [ { "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-maxMultiviewInstanceIndex-02688", @@ -23450,7 +23720,11 @@ }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-renderPass-02684", @@ -23555,6 +23829,32 @@ "text": " commandBuffer must not be a protected command buffer" } ], + "(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." 
+ }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + } + ], "(VK_VERSION_1_1,VK_KHR_multiview)": [ { "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-maxMultiviewInstanceIndex-02688", @@ -25976,7 +26276,11 @@ }, { "vuid": "VUID-vkCmdDispatch-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdDispatch-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." }, { "vuid": "VUID-vkCmdDispatch-groupCountX-00386", @@ -26048,6 +26352,32 @@ "vuid": "VUID-vkCmdDispatch-commandBuffer-02713", "text": " If commandBuffer is a protected command buffer, pipeline stages other than the framebuffer-space and compute stages in the VkPipeline object bound to the pipeline bind point must not write to any resource" } + ], + "(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdDispatch-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDispatch-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDispatch-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." 
+ }, + { + "vuid": "VUID-vkCmdDispatch-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDispatch-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + }, + { + "vuid": "VUID-vkCmdDispatch-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + } ] }, "vkCmdDispatchIndirect": { @@ -26106,7 +26436,11 @@ }, { "vuid": "VUID-vkCmdDispatchIndirect-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." }, { "vuid": "VUID-vkCmdDispatchIndirect-buffer-02708", @@ -26186,6 +26520,32 @@ "vuid": "VUID-vkCmdDispatchIndirect-commandBuffer-02711", "text": " commandBuffer must not be a protected command buffer" } + ], + "(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdDispatchIndirect-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." 
+ }, + { + "vuid": "VUID-vkCmdDispatchIndirect-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + } ] }, "VkDispatchIndirectCommand": { @@ -26260,7 +26620,11 @@ }, { "vuid": "VUID-vkCmdDispatchBase-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdDispatchBase-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." }, { "vuid": "VUID-vkCmdDispatchBase-baseGroupX-00421", @@ -26325,6 +26689,32 @@ "text": " If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource" } ], + "(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdDispatchBase-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDispatchBase-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDispatchBase-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdDispatchBase-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdDispatchBase-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + }, + { + "vuid": "VUID-vkCmdDispatchBase-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." 
+ } + ], "(VK_VERSION_1_1,VK_KHR_device_group)": [ { "vuid": "VUID-vkCmdDispatchBase-commandBuffer-parameter", @@ -26728,7 +27118,11 @@ }, { "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." }, { "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-renderPass-02684", @@ -26809,6 +27203,32 @@ "text": " commandBuffer must not be a protected command buffer" } ], + "(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + } + ], "(VK_VERSION_1_1,VK_KHR_multiview)": [ { "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-maxMultiviewInstanceIndex-02688", @@ -30222,7 +30642,11 @@ }, { "vuid": "VUID-vkCmdTraceRaysNV-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." 
+ "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdTraceRaysNV-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." }, { "vuid": "VUID-vkCmdTraceRaysNV-None-03429", @@ -30359,6 +30783,32 @@ "text": " If commandBuffer is a protected command buffer, pipeline stages other than the framebuffer-space and compute stages in the VkPipeline object bound to the pipeline bind point must not write to any resource" } ], + "(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdTraceRaysNV-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdTraceRaysNV-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdTraceRaysNV-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdTraceRaysNV-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdTraceRaysNV-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + }, + { + "vuid": "VUID-vkCmdTraceRaysNV-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + } + ], "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_NV_ray_tracing)": [ { "vuid": "VUID-vkCmdTraceRaysNV-commandBuffer-parameter", @@ -30454,7 +30904,11 @@ }, { "vuid": "VUID-vkCmdTraceRaysKHR-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." 
}, { "vuid": "VUID-vkCmdTraceRaysKHR-None-03429", @@ -30639,6 +31093,32 @@ "text": " If commandBuffer is a protected command buffer, pipeline stages other than the framebuffer-space and compute stages in the VkPipeline object bound to the pipeline bind point must not write to any resource" } ], + "(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdTraceRaysKHR-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32." + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command." + } + ], "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [ { "vuid": "VUID-vkCmdTraceRaysKHR-commandBuffer-parameter", @@ -30746,7 +31226,11 @@ }, { "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format." }, { "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-03429", @@ -30935,6 +31419,32 @@ "text": " commandBuffer must not be a protected command buffer" } ], + "(VK_EXT_shader_image_atomic_int64)": [ + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64." 
+                },
+                {
+                    "vuid": "VUID-vkCmdTraceRaysIndirectKHR-SampledType-04471",
+                    "text": " If a VkImageView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32."
+                },
+                {
+                    "vuid": "VUID-vkCmdTraceRaysIndirectKHR-SampledType-04472",
+                    "text": " If a VkBufferView with a VkFormat that has a 64-bit channel width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64."
+                },
+                {
+                    "vuid": "VUID-vkCmdTraceRaysIndirectKHR-SampledType-04473",
+                    "text": " If a VkBufferView with a VkFormat that has a channel width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32."
+                },
+                {
+                    "vuid": "VUID-vkCmdTraceRaysIndirectKHR-sparseImageInt64Atomics-04474",
+                    "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command."
+                },
+                {
+                    "vuid": "VUID-vkCmdTraceRaysIndirectKHR-sparseImageInt64Atomics-04475",
+                    "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command."
+                }
+            ],
             "(VK_NV_ray_tracing,VK_KHR_ray_tracing)+(VK_KHR_ray_tracing)": [
                 {
                     "vuid": "VUID-vkCmdTraceRaysIndirectKHR-commandBuffer-parameter",
@@ -32192,6 +32702,14 @@
                 }
             ]
         },
+        "VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT": {
+            "(VK_EXT_shader_image_atomic_int64)": [
+                {
+                    "vuid": "VUID-VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT-sType-sType",
+                    "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT"
+                }
+            ]
+        },
         "VkPhysicalDevice8BitStorageFeatures": {
             "(VK_VERSION_1_2,VK_KHR_8bit_storage)": [
                 {
diff --git a/registry/vk.xml b/registry/vk.xml
index 179d9b0..6487ad2 100644
--- a/registry/vk.xml
+++ b/registry/vk.xml
@@ -135,7 +135,7 @@ branch of the member gitlab server.
 // Vulkan 1.2 version number
 #define VK_API_VERSION_1_2 VK_MAKE_VERSION(1, 2, 0)// Patch version should always be set to 0
 // Version of this file
-#define VK_HEADER_VERSION 154
+#define VK_HEADER_VERSION 155
 // Complete version of this file
 #define VK_HEADER_VERSION_COMPLETE VK_MAKE_VERSION(1, 2, VK_HEADER_VERSION)
@@ -4985,6 +4985,12 @@ typedef void CAMetalLayer;
             uint32_t regionCount
             const VkImageResolve2KHR* pRegions
+        <type category="struct" name="VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member optional="true"><type>void</type>* <name>pNext</name></member>
+            <member><type>VkBool32</type> <name>shaderImageInt64Atomics</name></member>
+            <member><type>VkBool32</type> <name>sparseImageInt64Atomics</name></member>
+        </type>
         Vulkan enumerant (token) definitions
@@ -13035,10 +13041,12 @@ typedef void CAMetalLayer;
- + - - + + + +
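
Taken together, the validusage.json and vk.xml changes above register VK_EXT_shader_image_atomic_int64 and expose two feature bits, shaderImageInt64Atomics and sparseImageInt64Atomics, through the new VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT structure. The following is a minimal sketch of how an application might query and enable them with the C API; it is illustrative only and not part of this change — the helper name createDeviceWithImageInt64Atomics and the single-queue setup are assumptions.

#include <vulkan/vulkan.h>

// Sketch: query the new feature struct and, if 64-bit image atomics are
// supported, enable them (and the extension) at device creation.
VkDevice createDeviceWithImageInt64Atomics( VkPhysicalDevice physicalDevice,
                                            const VkDeviceQueueCreateInfo & queueInfo )
{
  VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT int64Features = {};
  int64Features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT;

  VkPhysicalDeviceFeatures2 features2 = {};
  features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
  features2.pNext = &int64Features;

  // Requires Vulkan 1.1 or VK_KHR_get_physical_device_properties2; fills in
  // shaderImageInt64Atomics and sparseImageInt64Atomics.
  vkGetPhysicalDeviceFeatures2( physicalDevice, &features2 );

  if ( int64Features.shaderImageInt64Atomics != VK_TRUE )
  {
    return VK_NULL_HANDLE;  // feature not supported
  }

  const char * extensions[] = { VK_EXT_SHADER_IMAGE_ATOMIC_INT64_EXTENSION_NAME };

  VkDeviceCreateInfo deviceInfo      = {};
  deviceInfo.sType                   = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
  deviceInfo.pNext                   = &features2;  // reuse the queried chain to enable the features
  deviceInfo.queueCreateInfoCount    = 1;
  deviceInfo.pQueueCreateInfos       = &queueInfo;
  deviceInfo.enabledExtensionCount   = 1;
  deviceInfo.ppEnabledExtensionNames = extensions;

  VkDevice device = VK_NULL_HANDLE;
  vkCreateDevice( physicalDevice, &deviceInfo, nullptr, &device );
  return device;
}

On the shader side, the feature corresponds to the SPIR-V Int64ImageEXT capability (SPV_EXT_shader_image_int64), which the new VUIDs constrain.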
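The SampledType-04470 through -04473 entries added for each draw, dispatch, and trace command require the shader-declared texel width to match the resource format's channel width: only 64-bit formats such as VK_FORMAT_R64_UINT or VK_FORMAT_R64_SINT may be accessed through an OpTypeImage whose SampledType has a Width of 64, and narrower formats must keep a 32-bit SampledType. A hedged host-side sketch follows, assuming the same <vulkan/vulkan.h> include and a device created as above; the helper name is an assumption, and a real application would also query vkGetPhysicalDeviceFormatProperties for VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT first.

// Sketch: a storage image intended for 64-bit image atomics.
VkImage createAtomicU64Image( VkDevice device )
{
  VkImageCreateInfo imageInfo = {};
  imageInfo.sType         = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
  imageInfo.imageType     = VK_IMAGE_TYPE_2D;
  imageInfo.format        = VK_FORMAT_R64_UINT;   // 64-bit channel width
  imageInfo.extent        = { 1024, 1024, 1 };
  imageInfo.mipLevels     = 1;
  imageInfo.arrayLayers   = 1;
  imageInfo.samples       = VK_SAMPLE_COUNT_1_BIT;
  imageInfo.tiling        = VK_IMAGE_TILING_OPTIMAL;
  imageInfo.usage         = VK_IMAGE_USAGE_STORAGE_BIT;
  imageInfo.sharingMode   = VK_SHARING_MODE_EXCLUSIVE;
  imageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

  // SampledType-04470: a shader accessing this image must declare a 64-bit image,
  // e.g. GLSL "layout(r64ui) uniform u64image2D img;" (GL_EXT_shader_image_int64),
  // which lowers to an OpTypeImage whose SampledType has a Width of 64.
  // sparseImageInt64Atomics-04474: adding VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT here
  // would additionally require the sparseImageInt64Atomics feature before the image
  // could be used with 64-bit atomics.
  VkImage image = VK_NULL_HANDLE;
  vkCreateImage( device, &imageInfo, nullptr, &image );
  return image;
}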
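Separately, the new OpImageWrite-04469 entries extend the existing image-view rule (04115) to texel buffers: a shader writing through a VkBufferView must supply a texel with at least as many components as the view's format. A short illustration, again a sketch only — the buffer handle, its usage flags, and the descriptor setup are assumed:

// Sketch: a four-component storage texel buffer view; the matching shader write
// must therefore provide at least four components.
VkBufferView createRgba32uiView( VkDevice device, VkBuffer storageTexelBuffer )
{
  VkBufferViewCreateInfo viewInfo = {};
  viewInfo.sType  = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
  viewInfo.buffer = storageTexelBuffer;            // assumed: created with
                                                   // VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT
  viewInfo.format = VK_FORMAT_R32G32B32A32_UINT;   // four components
  viewInfo.offset = 0;
  viewInfo.range  = VK_WHOLE_SIZE;

  VkBufferView view = VK_NULL_HANDLE;
  vkCreateBufferView( device, &viewInfo, nullptr, &view );
  return view;
}

// OpImageWrite-04469, shader side (GLSL, shown as a comment for reference):
//   layout(set = 0, binding = 0, rgba32ui) uniform uimageBuffer texels;
//   imageStore(texels, index, uvec4(a, b, c, d));   // a uvec2/uvec3 texel here would violate 04469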